Posted to commits@carbondata.apache.org by ch...@apache.org on 2016/08/15 07:09:34 UTC

[49/52] [partial] incubator-carbondata git commit: Renamed packages to org.apache.carbondata and fixed errors

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/common/src/test/java/org/carbondata/common/logging/ft/LoggingServiceTest_FT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/carbondata/common/logging/ft/LoggingServiceTest_FT.java b/common/src/test/java/org/carbondata/common/logging/ft/LoggingServiceTest_FT.java
deleted file mode 100644
index 66d1e3f..0000000
--- a/common/src/test/java/org/carbondata/common/logging/ft/LoggingServiceTest_FT.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.common.logging.ft;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStreamReader;
-
-import org.carbondata.common.logging.LogService;
-import org.carbondata.common.logging.LogServiceFactory;
-
-import junit.framework.TestCase;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.MDC;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class LoggingServiceTest_FT extends TestCase {
-
-  private static LogService logger =
-      LogServiceFactory.getLogService(LoggingServiceTest_FT.class.getName());
-
-  @Before public void setUp() throws Exception {
-    MDC.put("MODULE", "Function Test");
-    MDC.put("USER_NAME", "testuser");
-    MDC.put("CLIENT_IP", "127.0.0.1");
-    MDC.put("OPERATRION", "log");
-  }
-
-  @Test public void testIsAuditFileCreated() {
-    File f = new File("./unibiaudit.log");
-    Assert.assertFalse(f.exists());
-  }
-
-  @Test public void testAudit() {
-
-    String expectedAuditLine =
-        "[main] AUDIT [com.huawei.iweb.platform.logging.ft.LoggingServiceTest_FT] 127.0.0.1 "
-            + "testuser Function Test log- audit message created";
-    logger.audit("audit message created");
-
-    LogManager.shutdown();
-
-    try {
-      FileInputStream fstream = new FileInputStream("./unibiaudit.log");
-      BufferedReader br = new BufferedReader(new InputStreamReader(fstream));
-      String actualAuditLine = null;
-      String strLine = null;
-      while ((strLine = br.readLine()) != null) {
-        actualAuditLine = strLine;
-      }
-
-      System.out.println(actualAuditLine);
-
-      if (actualAuditLine != null) {
-        int index = actualAuditLine.indexOf("[main]");
-        actualAuditLine = actualAuditLine.substring(index);
-        Assert.assertEquals(expectedAuditLine, actualAuditLine);
-      } else {
-        Assert.assertTrue(false);
-      }
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-      Assert.assertTrue(!false);
-    } catch (IOException e) {
-      e.printStackTrace();
-      Assert.assertTrue(false);
-    }
-
-  }
-}
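As context for the removed functional test above, here is a minimal log4j 1.x sketch of how MDC values end up in an audit line. The conversion pattern, appender setup and class name below are illustrative assumptions, not the project's actual logging configuration.

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Logger;
import org.apache.log4j.MDC;
import org.apache.log4j.PatternLayout;

public class MdcAuditSketch {
  public static void main(String[] args) {
    // %X{key} pulls the per-thread MDC values into each log line, which is how
    // the removed test expected the user, IP and module to show up in the audit log.
    Logger root = Logger.getRootLogger();
    root.addAppender(new ConsoleAppender(new PatternLayout(
        "[%t] %p [%c] %X{CLIENT_IP} %X{USER_NAME} %X{MODULE} %X{OPERATION}- %m%n")));

    MDC.put("MODULE", "Function Test");
    MDC.put("USER_NAME", "testuser");
    MDC.put("CLIENT_IP", "127.0.0.1");
    MDC.put("OPERATION", "log");

    root.info("audit message created");
  }
}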

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/common/src/test/java/org/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java b/common/src/test/java/org/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
deleted file mode 100644
index 8aa82d8..0000000
--- a/common/src/test/java/org/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.common.logging.impl;
-
-import org.carbondata.common.logging.impl.AuditExtendedRollingFileAppender;
-import org.carbondata.common.logging.impl.AuditLevel;
-
-import junit.framework.Assert;
-import mockit.Deencapsulation;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class AuditExtendedRollingFileAppenderTest_UT {
-
-  private AuditExtendedRollingFileAppender rAppender = null;
-
-  @Before public void setUp() throws Exception {
-    rAppender = new AuditExtendedRollingFileAppender();
-    Deencapsulation.setField(rAppender, "fileName", "audit.log");
-    Deencapsulation.setField(rAppender, "maxBackupIndex", 1);
-    Deencapsulation.setField(rAppender, "maxFileSize", 1000L);
-
-  }
-
-  @After public void tearDown() throws Exception {
-
-  }
-
-  @Test public void testRollOver() {
-    rAppender.rollOver();
-    rAppender.rollOver();
-    rAppender.rollOver();
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testCleanLogs() {
-    final String startName = "audit";
-    final String folderPath = "./";
-    int maxBackupIndex = 1;
-
-    Deencapsulation.invoke(rAppender, "cleanLogs", startName, folderPath, maxBackupIndex);
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testSubAppendLoggingEvent() {
-    Logger logger = Logger.getLogger(this.getClass());
-    LoggingEvent event = new LoggingEvent(null, logger, 0L, AuditLevel.AUDIT, null, null);
-
-    Deencapsulation.setField(rAppender, "qw", null);
-    try {
-      rAppender.subAppend(event);
-    } catch (Exception e) {
-      //
-    }
-    Assert.assertTrue(true);
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/common/src/test/java/org/carbondata/common/logging/impl/AuditLevelTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/carbondata/common/logging/impl/AuditLevelTest_UT.java b/common/src/test/java/org/carbondata/common/logging/impl/AuditLevelTest_UT.java
deleted file mode 100644
index 4e29d2f..0000000
--- a/common/src/test/java/org/carbondata/common/logging/impl/AuditLevelTest_UT.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.common.logging.impl;
-
-import org.carbondata.common.logging.impl.AuditLevel;
-
-import junit.framework.TestCase;
-import org.apache.log4j.Level;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class AuditLevelTest_UT extends TestCase {
-
-  @Before public void setUp() throws Exception {
-  }
-
-  @After public void tearDown() throws Exception {
-  }
-
-  @Test public void testAuditLevel() {
-    assertEquals(AuditLevel.AUDIT.toInt(), 55000);
-  }
-
-  @Test public void testToLevelIntLevel() {
-    assertSame(AuditLevel.AUDIT, AuditLevel.toLevel(55000, Level.DEBUG));
-  }
-
-  @Test public void testToLevelStringLevel() {
-    assertSame(AuditLevel.AUDIT, AuditLevel.toLevel("AUDIT", Level.DEBUG));
-  }
-
-}
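The assertions above pin the AUDIT level at the integer value 55000. The following is a hypothetical re-creation of such a custom log4j 1.x level; the real AuditLevel class lives in org.apache.carbondata.common.logging.impl and may differ in detail.

import org.apache.log4j.Level;

public class AuditLevelSketch extends Level {
  public static final int AUDIT_INT = 55000;
  public static final AuditLevelSketch AUDIT = new AuditLevelSketch(AUDIT_INT, "AUDIT", 0);

  protected AuditLevelSketch(int level, String levelStr, int syslogEquivalent) {
    super(level, levelStr, syslogEquivalent);
  }

  // toLevel(...) is what the removed test exercised: unknown inputs fall back
  // to the supplied default level.
  public static Level toLevel(int val, Level defaultLevel) {
    return val == AUDIT_INT ? AUDIT : defaultLevel;
  }

  public static Level toLevel(String name, Level defaultLevel) {
    return "AUDIT".equalsIgnoreCase(name) ? AUDIT : defaultLevel;
  }
}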

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/common/src/test/java/org/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java b/common/src/test/java/org/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
deleted file mode 100644
index 14ccd20..0000000
--- a/common/src/test/java/org/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.common.logging.impl;
-
-import org.carbondata.common.logging.impl.AuditLevel;
-import org.carbondata.common.logging.impl.ExtendedRollingFileAppender;
-
-import junit.framework.Assert;
-import mockit.Deencapsulation;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class ExtendedRollingFileAppenderTest_UT {
-
-  private ExtendedRollingFileAppender rAppender = null;
-
-  @Before public void setUp() throws Exception {
-    rAppender = new ExtendedRollingFileAppender();
-    Deencapsulation.setField(rAppender, "fileName", "dummy.log");
-    Deencapsulation.setField(rAppender, "maxBackupIndex", 1);
-    Deencapsulation.setField(rAppender, "maxFileSize", 1000L);
-  }
-
-  @After public void tearDown() throws Exception {
-  }
-
-  @Test public void testRollOver() {
-    rAppender.rollOver();
-    rAppender.rollOver();
-    rAppender.rollOver();
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testCleanLogs() {
-    final String startName = "dummy";
-    final String folderPath = "./";
-    int maxBackupIndex = 1;
-
-    Deencapsulation.invoke(rAppender, "cleanLogs", startName, folderPath, maxBackupIndex);
-  }
-
-  @Test public void testSubAppendLoggingEvent() {
-    Logger logger = Logger.getLogger(this.getClass());
-    LoggingEvent event = new LoggingEvent(null, logger, 0L, AuditLevel.DEBUG, null, null);
-
-    try {
-      rAppender.subAppend(event);
-    } catch (Exception e) {
-      //
-    }
-    Assert.assertTrue(true);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/common/src/test/java/org/carbondata/common/logging/impl/FileUtilTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/carbondata/common/logging/impl/FileUtilTest_UT.java b/common/src/test/java/org/carbondata/common/logging/impl/FileUtilTest_UT.java
deleted file mode 100644
index 900cd0e..0000000
--- a/common/src/test/java/org/carbondata/common/logging/impl/FileUtilTest_UT.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.common.logging.impl;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-
-import junit.framework.TestCase;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class FileUtilTest_UT extends TestCase {
-
-  /**
-   * @throws Exception
-   */
-  @Before public void setUp() throws Exception {
-    File f = new File("myfile.txt");
-    if (!f.exists()) {
-      f.createNewFile();
-    }
-  }
-
-  /**
-   * @throws Exception
-   */
-  @After public void tearDown() throws Exception {
-    File f = new File("myfile.txt");
-    if (f.exists()) {
-      f.delete();
-    }
-  }
-
-  @Test public void testClose() {
-    try {
-      FileInputStream in = new FileInputStream(new File("myfile.txt"));
-      FileUtil.close(in);
-      assertTrue(true);
-    } catch (FileNotFoundException e) {
-      assertTrue(false);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/common/src/test/java/org/carbondata/common/logging/impl/StandardLogServiceTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/carbondata/common/logging/impl/StandardLogServiceTest_UT.java b/common/src/test/java/org/carbondata/common/logging/impl/StandardLogServiceTest_UT.java
deleted file mode 100644
index 4c9f0a3..0000000
--- a/common/src/test/java/org/carbondata/common/logging/impl/StandardLogServiceTest_UT.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.common.logging.impl;
-
-import org.carbondata.common.logging.impl.StandardLogService;
-
-import junit.framework.TestCase;
-import mockit.Mock;
-import mockit.MockUp;
-import org.apache.log4j.Category;
-import org.apache.log4j.Priority;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class StandardLogServiceTest_UT extends TestCase {
-
-  private StandardLogService logService = null;
-
-  /**
-   * @throws Exception
-   */
-  @Before public void setUp() throws Exception {
-
-    new MockUp<Category>() {
-      @SuppressWarnings("unused")
-      @Mock public boolean isDebugEnabled() {
-        return true;
-      }
-
-      @SuppressWarnings("unused")
-      @Mock public boolean isEnabledFor(Priority level) {
-        return true;
-      }
-
-      @SuppressWarnings("unused")
-      @Mock public boolean isInfoEnabled() {
-        return true;
-      }
-    };
-
-    logService = new StandardLogService(this.getClass().getName());
-  }
-
-  /**
-   * @throws Exception
-   * @Author k00742797
-   * @Description : tearDown
-   */
-  @After public void tearDown() throws Exception {
-  }
-
-  @Test public void testStandardLogService() {
-    if (logService != null && logService instanceof StandardLogService) {
-      Assert.assertTrue(true);
-    } else {
-      Assert.assertTrue(false);
-    }
-  }
-
-  @Test public void testIsDebugEnabled() {
-    Assert.assertEquals(true, logService.isDebugEnabled());
-  }
-
-  @Test public void testIsWarnEnabled() {
-    Assert.assertEquals(true, logService.isWarnEnabled());
-  }
-
-  @Test public void testSecureLogEventObjectArray() {
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testAuditLogEventObjectArray() {
-    logService.audit("testing");
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testDebugLogEventObjectArray() {
-    logService.debug("testing");
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testErrorLogEventObjectArray() {
-    logService.error("testing");
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testErrorLogEventThrowableObjectArray() {
-    Exception exception = new Exception("test");
-    logService.error(exception);
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testErrorLogEventThrowableMessage() {
-    Exception exception = new Exception("test");
-    logService.error(exception, "additional message");
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testInfoLogEventObjectArray() {
-    logService.info("testing");
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testIsInfoEnabled() {
-    Assert.assertEquals(true, logService.isInfoEnabled());
-  }
-
-  @Test public void testDeleteLogs() {
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testFlushLogs() {
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testSetEventProperties() {
-    logService.setEventProperties("CLIENT_IP", "127.0.0.1");
-    Assert.assertTrue(true);
-  }
-
-  @Test public void testIsDoLog() {
-    StandardLogService.setDoLog(true);
-    Assert.assertEquals(true, StandardLogService.isDoLog());
-
-    StandardLogService.setDoLog(false);
-    Assert.assertEquals(false, StandardLogService.isDoLog());
-
-  }
-
-  @Test public void testSetDoLog() {
-    StandardLogService.setDoLog(true);
-    Assert.assertEquals(true, StandardLogService.isDoLog());
-  }
-
-  @Test public void testAuditString() {
-    logService.audit("audit message");
-    Assert.assertTrue(true);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/common/ext/ColumnUniqueIdGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/common/ext/ColumnUniqueIdGenerator.java b/core/src/main/java/org/apache/carbondata/common/ext/ColumnUniqueIdGenerator.java
new file mode 100644
index 0000000..577a5ed
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/common/ext/ColumnUniqueIdGenerator.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.common.ext;
+
+import java.util.UUID;
+
+import org.apache.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.service.ColumnUniqueIdService;
+
+/**
+ * It returns a unique id for the given column
+ */
+public class ColumnUniqueIdGenerator implements ColumnUniqueIdService {
+
+  private static ColumnUniqueIdService columnUniqueIdService = new ColumnUniqueIdGenerator();
+
+  @Override public String generateUniqueId(String databaseName, ColumnSchema columnSchema) {
+    return UUID.randomUUID().toString();
+  }
+
+  public static ColumnUniqueIdService getInstance() {
+    return columnUniqueIdService;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/common/ext/DictionaryFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/common/ext/DictionaryFactory.java b/core/src/main/java/org/apache/carbondata/common/ext/DictionaryFactory.java
new file mode 100644
index 0000000..3cf1ad5
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/common/ext/DictionaryFactory.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.common.ext;
+
+import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
+import org.apache.carbondata.core.carbon.ColumnIdentifier;
+import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReader;
+import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReaderImpl;
+import org.apache.carbondata.core.reader.CarbonDictionaryReader;
+import org.apache.carbondata.core.reader.CarbonDictionaryReaderImpl;
+import org.apache.carbondata.core.reader.sortindex.CarbonDictionarySortIndexReader;
+import org.apache.carbondata.core.reader.sortindex.CarbonDictionarySortIndexReaderImpl;
+import org.apache.carbondata.core.service.DictionaryService;
+import org.apache.carbondata.core.writer.CarbonDictionaryWriter;
+import org.apache.carbondata.core.writer.CarbonDictionaryWriterImpl;
+import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWriter;
+import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWriterImpl;
+
+/**
+ * service to get dictionary reader and writer
+ */
+public class DictionaryFactory implements DictionaryService {
+
+  private static DictionaryService dictService = new DictionaryFactory();
+
+  /**
+   * get dictionary writer
+   *
+   * @param carbonTableIdentifier
+   * @param columnIdentifier
+   * @param carbonStorePath
+   * @return
+   */
+  @Override public CarbonDictionaryWriter getDictionaryWriter(
+      CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier,
+      String carbonStorePath) {
+    return new CarbonDictionaryWriterImpl(carbonStorePath, carbonTableIdentifier, columnIdentifier);
+  }
+
+  /**
+   * get dictionary sort index writer
+   *
+   * @param carbonTableIdentifier
+   * @param columnIdentifier
+   * @param carbonStorePath
+   * @return
+   */
+  @Override public CarbonDictionarySortIndexWriter getDictionarySortIndexWriter(
+      CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier,
+      String carbonStorePath) {
+    return new CarbonDictionarySortIndexWriterImpl(carbonTableIdentifier, columnIdentifier,
+        carbonStorePath);
+  }
+
+  /**
+   * get dictionary metadata reader
+   *
+   * @param carbonTableIdentifier
+   * @param columnIdentifier
+   * @param carbonStorePath
+   * @return
+   */
+  @Override public CarbonDictionaryMetadataReader getDictionaryMetadataReader(
+      CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier,
+      String carbonStorePath) {
+    return new CarbonDictionaryMetadataReaderImpl(carbonStorePath, carbonTableIdentifier,
+        columnIdentifier);
+  }
+
+  /**
+   * get dictionary reader
+   *
+   * @param carbonTableIdentifier
+   * @param columnIdentifier
+   * @param carbonStorePath
+   * @return
+   */
+  @Override public CarbonDictionaryReader getDictionaryReader(
+      CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier,
+      String carbonStorePath) {
+    return new CarbonDictionaryReaderImpl(carbonStorePath, carbonTableIdentifier, columnIdentifier);
+  }
+
+  /**
+   * get dictionary sort index reader
+   *
+   * @param carbonTableIdentifier
+   * @param columnIdentifier
+   * @param carbonStorePath
+   * @return
+   */
+  @Override public CarbonDictionarySortIndexReader getDictionarySortIndexReader(
+      CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier,
+      String carbonStorePath) {
+    return new CarbonDictionarySortIndexReaderImpl(carbonTableIdentifier, columnIdentifier,
+        carbonStorePath);
+  }
+
+  public static DictionaryService getInstance() {
+    return dictService;
+  }
+
+}
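A rough caller sketch for the service above; the table/column identifiers and store path are assumed to be supplied elsewhere, and the returned writer and reader are the implementations named in this file.

import org.apache.carbondata.common.ext.DictionaryFactory;
import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
import org.apache.carbondata.core.carbon.ColumnIdentifier;
import org.apache.carbondata.core.reader.CarbonDictionaryReader;
import org.apache.carbondata.core.service.DictionaryService;
import org.apache.carbondata.core.writer.CarbonDictionaryWriter;

public class DictionaryServiceSketch {
  // Obtains a writer and a reader for one column; how they are subsequently used
  // is defined by the writer/reader interfaces elsewhere in the codebase.
  static void openDictionaryAccessors(CarbonTableIdentifier table, ColumnIdentifier column,
      String storePath) {
    DictionaryService service = DictionaryFactory.getInstance();
    CarbonDictionaryWriter writer = service.getDictionaryWriter(table, column, storePath);
    CarbonDictionaryReader reader = service.getDictionaryReader(table, column, storePath);
  }
}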

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/common/ext/PathFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/common/ext/PathFactory.java b/core/src/main/java/org/apache/carbondata/common/ext/PathFactory.java
new file mode 100644
index 0000000..e5ff83a
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/common/ext/PathFactory.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.common.ext;
+
+import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
+import org.apache.carbondata.core.carbon.ColumnIdentifier;
+import org.apache.carbondata.core.carbon.path.CarbonStorePath;
+import org.apache.carbondata.core.carbon.path.CarbonTablePath;
+import org.apache.carbondata.core.service.PathService;
+
+/**
+ * Create helper to get path details
+ */
+public class PathFactory implements PathService {
+
+  private static PathService pathService = new PathFactory();
+
+  /**
+   * @param columnIdentifier
+   * @param storeLocation
+   * @param tableIdentifier
+   * @return store path related to tables
+   */
+  @Override public CarbonTablePath getCarbonTablePath(ColumnIdentifier columnIdentifier,
+      String storeLocation, CarbonTableIdentifier tableIdentifier) {
+    return CarbonStorePath.getCarbonTablePath(storeLocation, tableIdentifier);
+  }
+
+  public static PathService getInstance() {
+    return pathService;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/common/factory/CarbonCommonFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/common/factory/CarbonCommonFactory.java b/core/src/main/java/org/apache/carbondata/common/factory/CarbonCommonFactory.java
new file mode 100644
index 0000000..09b4465
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/common/factory/CarbonCommonFactory.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.common.factory;
+
+import org.apache.carbondata.common.ext.ColumnUniqueIdGenerator;
+import org.apache.carbondata.common.ext.DictionaryFactory;
+import org.apache.carbondata.common.ext.PathFactory;
+import org.apache.carbondata.core.service.ColumnUniqueIdService;
+import org.apache.carbondata.core.service.DictionaryService;
+import org.apache.carbondata.core.service.PathService;
+
+/**
+ * Interface to get services
+ */
+public class CarbonCommonFactory {
+
+  /**
+   * @return dictionary service
+   */
+  public static DictionaryService getDictionaryService() {
+    return DictionaryFactory.getInstance();
+  }
+
+  /**
+   * @return path service
+   */
+  public static PathService getPathService() {
+    return PathFactory.getInstance();
+  }
+
+  /**
+   * @return unique id generator
+   */
+  public static ColumnUniqueIdService getColumnUniqueIdGenerator() {
+    return ColumnUniqueIdGenerator.getInstance();
+  }
+
+}
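A minimal sketch of using the facade; each getter simply hands back the corresponding singleton defined in org.apache.carbondata.common.ext above. The databaseName and columnSchema arguments are assumed to be provided by the caller.

import org.apache.carbondata.common.factory.CarbonCommonFactory;
import org.apache.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema;
import org.apache.carbondata.core.service.ColumnUniqueIdService;
import org.apache.carbondata.core.service.DictionaryService;
import org.apache.carbondata.core.service.PathService;

public class CommonFactorySketch {
  static String newColumnId(String databaseName, ColumnSchema columnSchema) {
    DictionaryService dictionaryService = CarbonCommonFactory.getDictionaryService();
    PathService pathService = CarbonCommonFactory.getPathService();
    ColumnUniqueIdService idService = CarbonCommonFactory.getColumnUniqueIdGenerator();
    // ColumnUniqueIdGenerator currently ignores its arguments and returns a random UUID.
    return idService.generateUniqueId(databaseName, columnSchema);
  }
}

Routing lookups through this one class keeps call sites decoupled from the concrete DictionaryFactory, PathFactory and ColumnUniqueIdGenerator implementations.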

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/core/cache/Cache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/Cache.java b/core/src/main/java/org/apache/carbondata/core/cache/Cache.java
new file mode 100644
index 0000000..b519deb
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/Cache.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache;
+
+import java.util.List;
+
+import org.apache.carbondata.core.util.CarbonUtilException;
+
+/**
+ * A semi-persistent mapping from keys to values. Cache entries are manually added using
+ * #get(Key) or #getAll(List<Keys>), and are stored in the cache until
+ * either evicted or manually invalidated.
+ * Implementations of this interface are expected to be thread-safe, and can be safely accessed
+ * by multiple concurrent threads.
+ */
+public interface Cache<K, V> {
+
+  /**
+   * This method will get the value for the given key. If value does not exist
+   * for the given key, it will check and load the value.
+   *
+   * @param key
+   * @return
+   * @throws CarbonUtilException in case memory is not sufficient to load data into memory
+   */
+  V get(K key) throws CarbonUtilException;
+
+  /**
+   * This method will return a list of values for the given list of keys.
+   * For each key, this method will check and load the data if required.
+   *
+   * @param keys
+   * @return
+   * @throws CarbonUtilException in case memory is not sufficient to load data into memory
+   */
+  List<V> getAll(List<K> keys) throws CarbonUtilException;
+
+  /**
+   * This method will return the value for the given key. It will not check and load
+   * the data for the given key
+   *
+   * @param key
+   * @return
+   */
+  V getIfPresent(K key);
+
+  /**
+   * This method will remove the cache for a given key
+   *
+   * @param key
+   */
+  void invalidate(K key);
+}
+
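A minimal consumer sketch against this interface; the cache instance and the key/value types are assumed to come from CacheProvider and the dictionary classes added later in this commit.

import java.util.List;

import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.util.CarbonUtilException;

public class CacheUsageSketch {
  // Reads one key and a batch of keys, then releases the single entry.
  static <K, V> V lookup(Cache<K, V> cache, K key, List<K> moreKeys) throws CarbonUtilException {
    V value = cache.get(key);                // loads the value if it is not cached yet
    List<V> values = cache.getAll(moreKeys); // batch variant with the same loading behaviour
    V cachedOnly = cache.getIfPresent(key);  // returns the value only if already in memory
    cache.invalidate(key);                   // removes the entry from the cache
    return value;
  }
}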

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java b/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
new file mode 100644
index 0000000..fa505bf
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.carbondata.core.cache.dictionary.Dictionary;
+import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
+import org.apache.carbondata.core.cache.dictionary.ForwardDictionaryCache;
+import org.apache.carbondata.core.cache.dictionary.ReverseDictionaryCache;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+
+/**
+ * Cache provider class which will create a cache based on given type
+ */
+public class CacheProvider {
+
+  /**
+   * cache provider instance
+   */
+  private static CacheProvider cacheProvider = new CacheProvider();
+
+  /**
+   * a map that will hold the entry for cache type to cache object mapping
+   */
+  private Map<CacheType, Cache> cacheTypeToCacheMap =
+      new HashMap<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+
+  /**
+   * a map that will hold the mapping of cache type to LRU cache instance
+   */
+  private Map<CacheType, CarbonLRUCache> cacheTypeToLRUCacheMap =
+      new HashMap<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+
+  /**
+   * object lock instance to be used in synchronization block
+   */
+  private final Object lock = new Object();
+
+  /**
+   * private constructor to follow singleton design pattern for this class
+   */
+  private CacheProvider() {
+
+  }
+
+  /**
+   * @return cache provider instance
+   */
+  public static CacheProvider getInstance() {
+    return cacheProvider;
+  }
+
+  /**
+   * This method will check if a cache already exists for the given cache type and create one
+   * in case it is not present in the map
+   *
+   * @param cacheType       type of cache
+   * @param carbonStorePath store path
+   * @param <K>
+   * @param <V>
+   * @return
+   */
+  public <K, V> Cache<K, V> createCache(CacheType cacheType, String carbonStorePath) {
+    //check if lru cache is null, if null create one
+    //check if cache is null for given cache type, if null create one
+    if (!dictionaryCacheAlreadyExists(cacheType)) {
+      synchronized (lock) {
+        if (!dictionaryCacheAlreadyExists(cacheType)) {
+          if (null == cacheTypeToLRUCacheMap.get(cacheType)) {
+            createLRULevelCacheInstance(cacheType);
+          }
+          createDictionaryCacheForGivenType(cacheType, carbonStorePath);
+        }
+      }
+    }
+    return cacheTypeToCacheMap.get(cacheType);
+  }
+
+  /**
+   * This method will create the cache for given cache type
+   *
+   * @param cacheType       type of cache
+   * @param carbonStorePath store path
+   */
+  private void createDictionaryCacheForGivenType(CacheType cacheType, String carbonStorePath) {
+    Cache cacheObject = null;
+    if (cacheType.equals(CacheType.REVERSE_DICTIONARY)) {
+      cacheObject =
+          new ReverseDictionaryCache<DictionaryColumnUniqueIdentifier, Dictionary>(carbonStorePath,
+              cacheTypeToLRUCacheMap.get(cacheType));
+    } else if (cacheType.equals(CacheType.FORWARD_DICTIONARY)) {
+      cacheObject =
+          new ForwardDictionaryCache<DictionaryColumnUniqueIdentifier, Dictionary>(carbonStorePath,
+              cacheTypeToLRUCacheMap.get(cacheType));
+    }
+    cacheTypeToCacheMap.put(cacheType, cacheObject);
+  }
+
+  /**
+   * This method will create the lru cache instance based on the given type
+   *
+   * @param cacheType
+   */
+  private void createLRULevelCacheInstance(CacheType cacheType) {
+    CarbonLRUCache carbonLRUCache = null;
+    // if cache type is dictionary cache, then same lru cache instance has to be shared
+    // between forward and reverse cache
+    if (cacheType.equals(CacheType.REVERSE_DICTIONARY) || cacheType
+        .equals(CacheType.FORWARD_DICTIONARY)) {
+      carbonLRUCache = new CarbonLRUCache(CarbonCommonConstants.CARBON_MAX_LEVEL_CACHE_SIZE,
+          CarbonCommonConstants.CARBON_MAX_LEVEL_CACHE_SIZE_DEFAULT);
+      cacheTypeToLRUCacheMap.put(CacheType.REVERSE_DICTIONARY, carbonLRUCache);
+      cacheTypeToLRUCacheMap.put(CacheType.FORWARD_DICTIONARY, carbonLRUCache);
+    }
+  }
+
+  /**
+   * This method will check whether the map already has an entry for
+   * given cache type
+   *
+   * @param cacheType
+   * @return
+   */
+  private boolean dictionaryCacheAlreadyExists(CacheType cacheType) {
+    return null != cacheTypeToCacheMap.get(cacheType);
+  }
+
+  /**
+   * Below method will be used to clear the cache
+   */
+  public void dropAllCache() {
+    cacheTypeToLRUCacheMap.clear();
+    cacheTypeToCacheMap.clear();
+  }
+}
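A short sketch of obtaining a dictionary cache through the provider; note that, as the code above shows, the forward and reverse dictionary caches share a single CarbonLRUCache instance and therefore one memory budget. The store path is assumed to be supplied by the caller.

import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
import org.apache.carbondata.core.cache.CacheType;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;

public class CacheProviderSketch {
  static Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardCache(String storePath) {
    // createCache() lazily builds both the LRU cache and the dictionary cache
    // on first use and returns the same instance on subsequent calls.
    CacheProvider provider = CacheProvider.getInstance();
    return provider.createCache(CacheType.FORWARD_DICTIONARY, storePath);
  }
}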

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java b/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java
new file mode 100644
index 0000000..ea511e9
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/CacheType.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache;
+
+import org.apache.carbondata.core.cache.dictionary.Dictionary;
+import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
+
+/**
+ * class which defines different cache types. cache type can be dictionary cache for
+ * forward (surrogate key to byte array mapping) and reverse (byte array to
+ * surrogate mapping) dictionary or a B-tree cache
+ */
+public class CacheType<K, V> {
+
+  /**
+   * Forward dictionary cache which maintains surrogate key to byte array mapping
+   */
+  public static final CacheType<DictionaryColumnUniqueIdentifier, Dictionary> FORWARD_DICTIONARY =
+      new CacheType("forward_dictionary");
+
+  /**
+   * Reverse dictionary cache which maintains byte array to surrogate key mapping
+   */
+  public static final CacheType<DictionaryColumnUniqueIdentifier, Dictionary> REVERSE_DICTIONARY =
+      new CacheType("reverse_dictionary");
+
+  /**
+   * cacheName which is unique name for a cache
+   */
+  private String cacheName;
+
+  /**
+   * @param cacheName
+   */
+  private CacheType(String cacheName) {
+    this.cacheName = cacheName;
+  }
+
+  /**
+   * @return cache unique name
+   */
+  public String getCacheName() {
+    return cacheName;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/core/cache/Cacheable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/Cacheable.java b/core/src/main/java/org/apache/carbondata/core/cache/Cacheable.java
new file mode 100644
index 0000000..1259fe3
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/Cacheable.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache;
+
+/**
+ * interface which declares methods which will decide whether to keep
+ * cacheable objects in memory
+ */
+public interface Cacheable {
+
+  /**
+   * This method will return the timestamp of the file, based on which
+   * the decision will be taken whether to read that file or not
+   *
+   * @return
+   */
+  long getFileTimeStamp();
+
+  /**
+   * This method will return the access count for a column based on which decision will be taken
+   * whether to keep the object in memory
+   *
+   * @return
+   */
+  int getAccessCount();
+
+  /**
+   * This method will return the memory size of a column
+   *
+   * @return
+   */
+  long getMemorySize();
+}
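A deliberately small, hypothetical implementation of this contract (FixedSizeEntry is not part of the commit); it is only meant to show what the LRU cache added next looks at when deciding on eviction.

import org.apache.carbondata.core.cache.Cacheable;

public class FixedSizeEntry implements Cacheable {

  private final long memorySize;

  public FixedSizeEntry(long memorySize) {
    this.memorySize = memorySize;
  }

  @Override public long getFileTimeStamp() {
    return 0L; // not backed by a file in this sketch
  }

  @Override public int getAccessCount() {
    return 0;  // an access count of 0 means the entry is eligible for eviction
  }

  @Override public long getMemorySize() {
    return memorySize;
  }
}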

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java b/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
new file mode 100644
index 0000000..4ba38e4
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
@@ -0,0 +1,251 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.util.CarbonProperties;
+
+/**
+ * class which manages the lru cache
+ */
+public final class CarbonLRUCache {
+  /**
+   * constant for converting MB into bytes
+   */
+  private static final int BYTE_CONVERSION_CONSTANT = 1024 * 1024;
+  /**
+   * Attribute for Carbon LOGGER
+   */
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(CarbonLRUCache.class.getName());
+  /**
+   * Map that will contain key as table unique name and value as cache Holder
+   * object
+   */
+  private Map<String, Cacheable> lruCacheMap;
+  /**
+   * lruCacheSize
+   */
+  private long lruCacheMemorySize;
+  /**
+   * totalSize size of the cache
+   */
+  private long currentSize;
+
+  /**
+   * @param propertyName        property name to take the size configured
+   * @param defaultPropertyName default property in case size is not configured
+   */
+  public CarbonLRUCache(String propertyName, String defaultPropertyName) {
+    try {
+      lruCacheMemorySize = Integer
+          .parseInt(CarbonProperties.getInstance().getProperty(propertyName, defaultPropertyName));
+    } catch (NumberFormatException e) {
+      lruCacheMemorySize = Integer.parseInt(defaultPropertyName);
+    }
+    initCache();
+    if (lruCacheMemorySize > 0) {
+      LOGGER.info("Configured level cahce size is " + lruCacheMemorySize + " MB");
+      // convert in bytes
+      lruCacheMemorySize = lruCacheMemorySize * BYTE_CONVERSION_CONSTANT;
+    } else {
+      LOGGER.info("Column cache size not configured. Therefore default behavior will be "
+              + "considered and no LRU based eviction of columns will be done");
+    }
+  }
+
+  /**
+   * initialize lru cache
+   */
+  private void initCache() {
+    lruCacheMap =
+        new LinkedHashMap<String, Cacheable>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE, 1.0f,
+            true);
+  }
+
+  /**
+   * This method will give the list of all the keys that can be deleted from
+   * the level LRU cache
+   */
+  private List<String> getKeysToBeRemoved(long size) {
+    List<String> toBeDeletedKeys =
+        new ArrayList<String>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+    long removedSize = 0;
+    for (Entry<String, Cacheable> entry : lruCacheMap.entrySet()) {
+      String key = entry.getKey();
+      Cacheable cacheInfo = entry.getValue();
+      long memorySize = cacheInfo.getMemorySize();
+      if (canBeRemoved(cacheInfo)) {
+        removedSize = removedSize + memorySize;
+        toBeDeletedKeys.add(key);
+        // check if after removing the current file size, required
+        // size when added to current size is sufficient to load a
+        // level or not
+        if (lruCacheMemorySize >= (currentSize - memorySize + size)) {
+          toBeDeletedKeys.clear();
+          toBeDeletedKeys.add(key);
+          removedSize = memorySize;
+          break;
+        }
+        // check if after removing the added size/removed size,
+        // required size when added to current size is sufficient to
+        // load a level or not
+        else if (lruCacheMemorySize >= (currentSize - removedSize + size)) {
+          break;
+        }
+      }
+    }
+    // this case will come when iteration is complete over the keys but
+    // still size is not sufficient for level file to be loaded, then we
+    // will not delete any of the keys
+    if ((currentSize - removedSize + size) > lruCacheMemorySize) {
+      toBeDeletedKeys.clear();
+    }
+    return toBeDeletedKeys;
+  }
+
+  /**
+   * @param cacheInfo
+   * @return
+   */
+  private boolean canBeRemoved(Cacheable cacheInfo) {
+    if (cacheInfo.getAccessCount() > 0) {
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * @param key
+   */
+  public void remove(String key) {
+    synchronized (lruCacheMap) {
+      removeKey(key);
+    }
+  }
+
+  /**
+   * This method will remove the key from lru cache
+   *
+   * @param key
+   */
+  private void removeKey(String key) {
+    Cacheable cacheable = lruCacheMap.get(key);
+    if (null != cacheable) {
+      currentSize = currentSize - cacheable.getMemorySize();
+    }
+    lruCacheMap.remove(key);
+    LOGGER.info("Removed level entry from InMemory level lru cache :: " + key);
+  }
+
+  /**
+   * This method will check if required size is available in the memory and then add
+   * the given cacheable object to the lru cache
+   *
+   * @param columnIdentifier
+   * @param cacheInfo
+   */
+  public boolean put(String columnIdentifier, Cacheable cacheInfo, long requiredSize) {
+    boolean columnKeyAddedSuccessfully = false;
+    if (freeMemorySizeForAddingCache(requiredSize)) {
+      synchronized (lruCacheMap) {
+        currentSize = currentSize + requiredSize;
+        if (null == lruCacheMap.get(columnIdentifier)) {
+          lruCacheMap.put(columnIdentifier, cacheInfo);
+        }
+        columnKeyAddedSuccessfully = true;
+      }
+      LOGGER.debug("Added level entry to InMemory level lru cache :: " + columnIdentifier);
+    } else {
+      LOGGER.error("Size not available. Column cannot be added to level lru cache :: "
+          + columnIdentifier + " .Required Size = " + requiredSize + " Size available "
+          + (lruCacheMemorySize - currentSize));
+    }
+    return columnKeyAddedSuccessfully;
+  }
+
+  /**
+   * This method will check a required column can be loaded into memory or not. If required
+   * this method will call for eviction of existing data from memory
+   *
+   * @param requiredSize
+   * @return
+   */
+  private boolean freeMemorySizeForAddingCache(long requiredSize) {
+    boolean memoryAvailable = false;
+    if (lruCacheMemorySize > 0) {
+      if (isSizeAvailableToLoadColumnDictionary(requiredSize)) {
+        memoryAvailable = true;
+      } else {
+        synchronized (lruCacheMap) {
+          // get the keys that can be removed from memory
+          List<String> keysToBeRemoved = getKeysToBeRemoved(requiredSize);
+          for (String cacheKey : keysToBeRemoved) {
+            removeKey(cacheKey);
+          }
+          // after removing the keys check again if required size is available
+          if (isSizeAvailableToLoadColumnDictionary(requiredSize)) {
+            memoryAvailable = true;
+          }
+        }
+      }
+    } else {
+      memoryAvailable = true;
+    }
+    return memoryAvailable;
+  }
+
+  /**
+   * This method will check if size is available to load the dictionary into memory
+   *
+   * @param requiredSize
+   * @return
+   */
+  private boolean isSizeAvailableToLoadColumnDictionary(long requiredSize) {
+    return lruCacheMemorySize >= (currentSize + requiredSize);
+  }
+
+  /**
+   * @param key
+   * @return
+   */
+  public Cacheable get(String key) {
+    synchronized (lruCacheMap) {
+      return lruCacheMap.get(key);
+    }
+  }
+
+  /**
+   * This method will empty the level cache
+   */
+  public void clear() {
+    synchronized (lruCacheMap) {
+      lruCacheMap.clear();
+    }
+  }
+}
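A usage sketch for the LRU cache above, reusing the hypothetical FixedSizeEntry from the Cacheable section; the constructor arguments are the same constants CacheProvider passes in.

import org.apache.carbondata.core.cache.Cacheable;
import org.apache.carbondata.core.cache.CarbonLRUCache;
import org.apache.carbondata.core.constants.CarbonCommonConstants;

public class LruCacheSketch {
  static void demo() {
    CarbonLRUCache lru = new CarbonLRUCache(CarbonCommonConstants.CARBON_MAX_LEVEL_CACHE_SIZE,
        CarbonCommonConstants.CARBON_MAX_LEVEL_CACHE_SIZE_DEFAULT);

    Cacheable entry = new FixedSizeEntry(1024L);
    // put() only succeeds if the configured budget can hold the entry,
    // evicting zero-access-count entries first when needed.
    boolean added = lru.put("dbName_tableName_columnId", entry, entry.getMemorySize());
    if (added) {
      Cacheable fetched = lru.get("dbName_tableName_columnId");
      lru.remove("dbName_tableName_columnId");
    }
  }
}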

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
new file mode 100644
index 0000000..a62695c
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
@@ -0,0 +1,279 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache.dictionary;
+
+import java.nio.charset.Charset;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+
+/**
+ * class that implements cacheable interface and methods specific to column dictionary
+ */
+public abstract class AbstractColumnDictionaryInfo implements DictionaryInfo {
+
+  /**
+   * list that will hold all the dictionary chunks for one column
+   */
+  protected List<List<byte[]>> dictionaryChunks = new CopyOnWriteArrayList<>();
+
+  /**
+   * minimum value of surrogate key, dictionary value key will start from count 1
+   */
+  protected static final int MINIMUM_SURROGATE_KEY = 1;
+
+  /**
+   * atomic integer to maintain the access count for a column access
+   */
+  protected AtomicInteger accessCount = new AtomicInteger();
+
+  /**
+   * file timestamp
+   */
+  protected long fileTimeStamp;
+
+  /**
+   * offset till where file is read
+   */
+  protected long offsetTillFileIsRead;
+
+  /**
+   * length of dictionary metadata file
+   */
+  private long dictionaryMetaFileLength;
+
+  /**
+   * This method will return the timestamp of the file, based on which
+   * the decision will be taken whether to read that file or not
+   *
+   * @return
+   */
+  @Override public long getFileTimeStamp() {
+    return fileTimeStamp;
+  }
+
+  /**
+   * This method will return the access count for a column based on which decision will be taken
+   * whether to keep the object in memory
+   *
+   * @return
+   */
+  @Override public int getAccessCount() {
+    return accessCount.get();
+  }
+
+  /**
+   * This method will return the memory size of a column
+   *
+   * @return
+   */
+  @Override public long getMemorySize() {
+    return offsetTillFileIsRead;
+  }
+
+  /**
+   * This method will increment the access count for a column by 1
+   * whenever a column is getting used in query or incremental data load
+   */
+  @Override public void incrementAccessCount() {
+    accessCount.incrementAndGet();
+  }
+
+  /**
+   * This method will decrement the access count for a column by 1
+   * whenever a column usage is complete
+   */
+  private void decrementAccessCount() {
+    if (accessCount.get() > 0) {
+      accessCount.decrementAndGet();
+    }
+  }
+
+  /**
+   * This method will update the end offset of the file every time a file is read
+   *
+   * @param offsetTillFileIsRead
+   */
+  @Override public void setOffsetTillFileIsRead(long offsetTillFileIsRead) {
+    this.offsetTillFileIsRead = offsetTillFileIsRead;
+  }
+
+  /**
+   * This method will update the timestamp of a file if a file is modified
+   * like in case of incremental load
+   *
+   * @param fileTimeStamp
+   */
+  @Override public void setFileTimeStamp(long fileTimeStamp) {
+    this.fileTimeStamp = fileTimeStamp;
+  }
+
+  /**
+   * This method returns the list of dictionary chunks of a column.
+   * Application scenario:
+   * preparing the column sort info while writing the sort index file.
+   *
+   * @return
+   */
+  @Override public DictionaryChunksWrapper getDictionaryChunks() {
+    DictionaryChunksWrapper chunksWrapper = new DictionaryChunksWrapper(dictionaryChunks);
+    return chunksWrapper;
+  }
+
+  /**
+   * This method will release the objects and set default value for primitive types
+   */
+  @Override public void clear() {
+    decrementAccessCount();
+  }
+
+  /**
+   * This method will find and return the sort index for a given dictionary id.
+   * Applicable scenarios:
+   * 1. Used in case of order by queries when data sorting is required
+   *
+   * @param surrogateKey a unique ID for a dictionary value
+   * @return if found returns key else 0
+   */
+  @Override public int getSortedIndex(int surrogateKey) {
+    return 0;
+  }
+
+  /**
+   * dictionary metadata file length which will be set whenever we reload dictionary
+   * data from disk
+   *
+   * @param dictionaryMetaFileLength length of dictionary metadata file
+   */
+  @Override public void setDictionaryMetaFileLength(long dictionaryMetaFileLength) {
+    this.dictionaryMetaFileLength = dictionaryMetaFileLength;
+  }
+
+  /**
+   * Dictionary meta file length which will be read to check whether the length of the
+   * dictionary meta file has been modified
+   *
+   * @return
+   */
+  @Override public long getDictionaryMetaFileLength() {
+    return dictionaryMetaFileLength;
+  }
+
+  /**
+   * This method will find and return the dictionary value from sorted index.
+   * Applicable scenarios:
+   * 1. Query final result preparation in case of order by queries:
+   * While converting the final result, which will be a surrogate key,
+   * back to the original dictionary values, this method will be used
+   *
+   * @param sortedIndex sort index of dictionary value
+   * @return value if found else null
+   */
+  @Override public String getDictionaryValueFromSortedIndex(int sortedIndex) {
+    return null;
+  }
+
+  /**
+   * This method will set the sort order index of a dictionary column.
+   * Sort order index is the index of dictionary values after they are sorted.
+   *
+   * @param sortOrderIndex
+   */
+  @Override public void setSortOrderIndex(List<Integer> sortOrderIndex) {
+  }
+
+  /**
+   * This method will set the sort reverse index of a dictionary column.
+   * Sort reverse index is the index of dictionary values before they are sorted.
+   *
+   * @param sortReverseOrderIndex
+   */
+  @Override public void setSortReverseOrderIndex(List<Integer> sortReverseOrderIndex) {
+  }
+
+  /**
+   * This method will find and return the dictionary value for a given surrogate key.
+   * Applicable scenarios:
+   * 1. Query final result preparation : While converting the final result, which will
+   * be a surrogate key, back to the original dictionary values, this method will be used
+   *
+   * @param surrogateKey a unique ID for a dictionary value
+   * @return value if found else null
+   */
+  @Override public String getDictionaryValueForKey(int surrogateKey) {
+    String dictionaryValue = null;
+    if (surrogateKey < MINIMUM_SURROGATE_KEY) {
+      return dictionaryValue;
+    }
+    byte[] dictionaryValueInBytes = getDictionaryBytesFromSurrogate(surrogateKey);
+    if (null != dictionaryValueInBytes) {
+      dictionaryValue = new String(dictionaryValueInBytes,
+          Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+    }
+    return dictionaryValue;
+  }
+
+  /**
+   * This method will find and return the dictionary value as byte array for a
+   * given surrogate key
+   *
+   * @param surrogateKey
+   * @return
+   */
+  protected byte[] getDictionaryBytesFromSurrogate(int surrogateKey) {
+    byte[] dictionaryValueInBytes = null;
+    int totalSizeOfDictionaryChunksTraversed = 0;
+    for (List<byte[]> oneDictionaryChunk : dictionaryChunks) {
+      totalSizeOfDictionaryChunksTraversed =
+          totalSizeOfDictionaryChunksTraversed + oneDictionaryChunk.size();
+      // skip dictionary chunks until the total size of the chunks traversed
+      // is at least the surrogate key
+      if (totalSizeOfDictionaryChunksTraversed < surrogateKey) {
+        continue;
+      }
+      // let's say surrogateKey = 26, total size traversed is 28, dictionary chunk size = 12
+      // then surrogate position in dictionary chunk list is = 26 - (28-12) - 1 = 9
+      // -1 because list index starts from 0
+      int surrogatePositionInDictionaryChunk =
+          surrogateKey - (totalSizeOfDictionaryChunksTraversed - oneDictionaryChunk.size()) - 1;
+      dictionaryValueInBytes = oneDictionaryChunk.get(surrogatePositionInDictionaryChunk);
+      break;
+    }
+    return dictionaryValueInBytes;
+  }
+
+  /**
+   * This method will find and return the surrogate key for a given dictionary value
+   * Applicable scenario:
+   * 1. Incremental data load : Dictionary will not be generated for existing values. For
+   * that, values have to be looked up in the existing dictionary cache.
+   * 2. Filter scenarios where from value surrogate key has to be found.
+   *
+   * @param value dictionary value
+   * @return if found returns key else 0
+   */
+  @Override public int getSurrogateKey(String value) {
+    byte[] keyData = value.getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+    return getSurrogateKey(keyData);
+  }
+}
+
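
The comment inside getDictionaryBytesFromSurrogate above walks through the position
arithmetic (surrogate key 26, 28 entries traversed, chunk size 12 gives list index 9).
A standalone sketch of that lookup over plain lists, illustrative only and not part of
this commit, is:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

public class ChunkLookupSketch {

  // Returns the value for a 1-based surrogate key spread across several chunks,
  // mirroring the arithmetic described in the comment above.
  static byte[] lookup(List<List<byte[]>> chunks, int surrogateKey) {
    int traversed = 0;
    for (List<byte[]> chunk : chunks) {
      traversed += chunk.size();
      if (traversed < surrogateKey) {
        continue; // surrogate key lies in a later chunk
      }
      // e.g. surrogateKey = 26, traversed = 28, chunk size = 12
      // -> position = 26 - (28 - 12) - 1 = 9 (list indexes are 0-based)
      int position = surrogateKey - (traversed - chunk.size()) - 1;
      return chunk.get(position);
    }
    return null; // surrogate key larger than the total number of entries
  }

  public static void main(String[] args) {
    List<List<byte[]>> chunks = Arrays.asList(
        Arrays.asList("a".getBytes(StandardCharsets.UTF_8), "b".getBytes(StandardCharsets.UTF_8)),
        Arrays.asList("c".getBytes(StandardCharsets.UTF_8), "d".getBytes(StandardCharsets.UTF_8)));
    // surrogate key 3 falls into the second chunk at index 0, so this prints "c"
    System.out.println(new String(lookup(chunks, 3), StandardCharsets.UTF_8));
  }
}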

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java
new file mode 100644
index 0000000..01dd269
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java
@@ -0,0 +1,297 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache.dictionary;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.carbondata.common.factory.CarbonCommonFactory;
+import org.apache.carbondata.core.cache.Cache;
+import org.apache.carbondata.core.cache.CacheType;
+import org.apache.carbondata.core.cache.CarbonLRUCache;
+import org.apache.carbondata.core.carbon.path.CarbonTablePath;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.reader.CarbonDictionaryColumnMetaChunk;
+import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReader;
+import org.apache.carbondata.core.service.DictionaryService;
+import org.apache.carbondata.core.service.PathService;
+import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.CarbonUtilException;
+
+/**
+ * Abstract class which implements methods common to reverse and forward dictionary cache
+ */
+public abstract class AbstractDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
+    V extends Dictionary>
+    implements Cache<DictionaryColumnUniqueIdentifier, Dictionary> {
+
+  /**
+   * thread pool size to be used for dictionary data reading
+   */
+  protected int thread_pool_size;
+
+  /**
+   * LRU cache variable
+   */
+  protected CarbonLRUCache carbonLRUCache;
+
+  /**
+   * carbon store path
+   */
+  protected String carbonStorePath;
+
+  /**
+   * @param carbonStorePath
+   * @param carbonLRUCache
+   */
+  public AbstractDictionaryCache(String carbonStorePath, CarbonLRUCache carbonLRUCache) {
+    this.carbonStorePath = carbonStorePath;
+    this.carbonLRUCache = carbonLRUCache;
+    initThreadPoolSize();
+  }
+
+  /**
+   * This method will initialize the thread pool size, i.e. the maximum number of
+   * threads to be used for a job
+   */
+  private void initThreadPoolSize() {
+    try {
+      thread_pool_size = Integer.parseInt(CarbonProperties.getInstance()
+          .getProperty(CarbonCommonConstants.NUM_CORES_LOADING,
+              CarbonCommonConstants.NUM_CORES_DEFAULT_VAL));
+    } catch (NumberFormatException e) {
+      thread_pool_size = Integer.parseInt(CarbonCommonConstants.NUM_CORES_DEFAULT_VAL);
+    }
+  }
+
+  /**
+   * This method will check if dictionary and its metadata file exists for a given column
+   *
+   * @param dictionaryColumnUniqueIdentifier unique identifier which contains dbName,
+   *                                         tableName and columnIdentifier
+   * @return
+   */
+  protected boolean isFileExistsForGivenColumn(
+      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier) {
+    PathService pathService = CarbonCommonFactory.getPathService();
+    CarbonTablePath carbonTablePath = pathService
+        .getCarbonTablePath(dictionaryColumnUniqueIdentifier.getColumnIdentifier(), carbonStorePath,
+            dictionaryColumnUniqueIdentifier.getCarbonTableIdentifier());
+
+    String dictionaryFilePath =
+        carbonTablePath.getDictionaryFilePath(dictionaryColumnUniqueIdentifier
+            .getColumnIdentifier().getColumnId());
+    String dictionaryMetadataFilePath =
+        carbonTablePath.getDictionaryMetaFilePath(dictionaryColumnUniqueIdentifier
+            .getColumnIdentifier().getColumnId());
+    // check if both dictionary and its metadata file exists for a given column
+    return CarbonUtil.isFileExists(dictionaryFilePath) && CarbonUtil
+        .isFileExists(dictionaryMetadataFilePath);
+  }
+
+  /**
+   * This method will read the dictionary metadata file and return its last meta chunk entry
+   *
+   * @param dictionaryColumnUniqueIdentifier
+   * @return the last dictionary metadata chunk
+   * @throws IOException read and close method throws IO exception
+   */
+  protected CarbonDictionaryColumnMetaChunk readLastChunkFromDictionaryMetadataFile(
+      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier) throws IOException {
+    DictionaryService dictService = CarbonCommonFactory.getDictionaryService();
+    CarbonDictionaryMetadataReader columnMetadataReaderImpl = dictService
+        .getDictionaryMetadataReader(dictionaryColumnUniqueIdentifier.getCarbonTableIdentifier(),
+            dictionaryColumnUniqueIdentifier.getColumnIdentifier(), carbonStorePath);
+
+    CarbonDictionaryColumnMetaChunk carbonDictionaryColumnMetaChunk = null;
+    // read metadata file
+    try {
+      carbonDictionaryColumnMetaChunk =
+          columnMetadataReaderImpl.readLastEntryOfDictionaryMetaChunk();
+    } finally {
+      // close the metadata reader
+      columnMetadataReaderImpl.close();
+    }
+    return carbonDictionaryColumnMetaChunk;
+  }
+
+  /**
+   * This method will validate dictionary metadata file for any modification
+   *
+   * @param carbonFile
+   * @param fileTimeStamp
+   * @param endOffset
+   * @return
+   */
+  private boolean isDictionaryMetaFileModified(CarbonFile carbonFile, long fileTimeStamp,
+      long endOffset) {
+    return carbonFile.isFileModified(fileTimeStamp, endOffset);
+  }
+
+  /**
+   * This method will return the carbon file object based on its type (local, HDFS)
+   *
+   * @param dictionaryColumnUniqueIdentifier
+   * @return
+   */
+  private CarbonFile getDictionaryMetaCarbonFile(
+      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier) {
+    PathService pathService = CarbonCommonFactory.getPathService();
+    CarbonTablePath carbonTablePath = pathService
+        .getCarbonTablePath(dictionaryColumnUniqueIdentifier.getColumnIdentifier(), carbonStorePath,
+            dictionaryColumnUniqueIdentifier.getCarbonTableIdentifier());
+    String dictionaryFilePath =
+        carbonTablePath.getDictionaryMetaFilePath(dictionaryColumnUniqueIdentifier
+            .getColumnIdentifier().getColumnId());
+    FileFactory.FileType fileType = FileFactory.getFileType(dictionaryFilePath);
+    CarbonFile carbonFile = FileFactory.getCarbonFile(dictionaryFilePath, fileType);
+    return carbonFile;
+  }
+
+  /**
+   * This method will get the value for the given key. If value does not exist
+   * for the given key, it will check and load the value.
+   *
+   * @param dictionaryColumnUniqueIdentifier unique identifier which contains dbName,
+   *                                         tableName and columnIdentifier
+   * @param dictionaryInfo
+   * @param lruCacheKey
+   * @param loadSortIndex                    read and load sort index file in memory
+   * @throws CarbonUtilException in case memory is not sufficient to load dictionary into memory
+   */
+  protected void checkAndLoadDictionaryData(
+      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier,
+      DictionaryInfo dictionaryInfo, String lruCacheKey, boolean loadSortIndex)
+      throws CarbonUtilException {
+    try {
+      // read last segment dictionary meta chunk entry to get the end offset of file
+      CarbonFile carbonFile = getDictionaryMetaCarbonFile(dictionaryColumnUniqueIdentifier);
+      boolean dictionaryMetaFileModified =
+          isDictionaryMetaFileModified(carbonFile, dictionaryInfo.getFileTimeStamp(),
+              dictionaryInfo.getDictionaryMetaFileLength());
+      // if dictionary metadata file is modified then only read the last entry from dictionary
+      // meta file
+      if (dictionaryMetaFileModified) {
+        synchronized (dictionaryInfo) {
+          carbonFile = getDictionaryMetaCarbonFile(dictionaryColumnUniqueIdentifier);
+          dictionaryMetaFileModified =
+              isDictionaryMetaFileModified(carbonFile, dictionaryInfo.getFileTimeStamp(),
+                  dictionaryInfo.getDictionaryMetaFileLength());
+          // Double Check :
+          // if dictionary metadata file is modified then only read the last entry from dictionary
+          // meta file
+          if (dictionaryMetaFileModified) {
+            CarbonDictionaryColumnMetaChunk carbonDictionaryColumnMetaChunk =
+                readLastChunkFromDictionaryMetadataFile(dictionaryColumnUniqueIdentifier);
+            // required size will be the total size of the file minus the offset
+            // till which the file has already been read
+            long requiredSize =
+                carbonDictionaryColumnMetaChunk.getEnd_offset() - dictionaryInfo.getMemorySize();
+            if (requiredSize > 0) {
+              boolean columnAddedToLRUCache =
+                  carbonLRUCache.put(lruCacheKey, dictionaryInfo, requiredSize);
+              // if column is successfully added to lru cache then only load the
+              // dictionary data
+              if (columnAddedToLRUCache) {
+                // load dictionary data
+                loadDictionaryData(dictionaryInfo, dictionaryColumnUniqueIdentifier,
+                    dictionaryInfo.getMemorySize(), carbonDictionaryColumnMetaChunk.getEnd_offset(),
+                    loadSortIndex);
+                // set the end offset till where file is read
+                dictionaryInfo
+                    .setOffsetTillFileIsRead(carbonDictionaryColumnMetaChunk.getEnd_offset());
+                dictionaryInfo.setFileTimeStamp(carbonFile.getLastModifiedTime());
+                dictionaryInfo.setDictionaryMetaFileLength(carbonFile.getSize());
+              } else {
+                throw new CarbonUtilException(
+                    "Cannot load dictionary into memory. Not enough memory available");
+              }
+            }
+          }
+        }
+      }
+      // increment the column access count
+      incrementDictionaryAccessCount(dictionaryInfo);
+    } catch (IOException e) {
+      throw new CarbonUtilException(e.getMessage());
+    }
+  }
+
+  /**
+   * This method will prepare and return the LRU cache key
+   *
+   * @param columnIdentifier
+   * @return
+   */
+  protected String getLruCacheKey(String columnIdentifier, CacheType cacheType) {
+    String lruCacheKey =
+        columnIdentifier + CarbonCommonConstants.UNDERSCORE + cacheType.getCacheName();
+    return lruCacheKey;
+  }
+
+  /**
+   * This method will check and load the dictionary file in memory for a given column
+   *
+   * @param dictionaryInfo                   holds dictionary information and data
+   * @param dictionaryColumnUniqueIdentifier unique identifier which contains dbName,
+   *                                         tableName and columnIdentifier
+   * @param dictionaryChunkStartOffset       start offset from where dictionary file has to
+   *                                         be read
+   * @param dictionaryChunkEndOffset         end offset till where dictionary file has to
+   *                                         be read
+   * @param loadSortIndex
+   * @throws IOException
+   */
+  private void loadDictionaryData(DictionaryInfo dictionaryInfo,
+      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier,
+      long dictionaryChunkStartOffset, long dictionaryChunkEndOffset, boolean loadSortIndex)
+      throws IOException {
+    DictionaryCacheLoader dictionaryCacheLoader =
+        new DictionaryCacheLoaderImpl(dictionaryColumnUniqueIdentifier.getCarbonTableIdentifier(),
+            carbonStorePath);
+    dictionaryCacheLoader
+        .load(dictionaryInfo, dictionaryColumnUniqueIdentifier.getColumnIdentifier(),
+            dictionaryChunkStartOffset, dictionaryChunkEndOffset, loadSortIndex);
+  }
+
+  /**
+   * This method will increment the access count for a given dictionary column
+   *
+   * @param dictionaryInfo
+   */
+  protected void incrementDictionaryAccessCount(DictionaryInfo dictionaryInfo) {
+    dictionaryInfo.incrementAccessCount();
+  }
+
+  /**
+   * This method will update the dictionary access count which is required for its removal
+   * from the column LRU cache
+   *
+   * @param dictionaryList
+   */
+  protected void clearDictionary(List<Dictionary> dictionaryList) {
+    for (Dictionary dictionary : dictionaryList) {
+      dictionary.clear();
+    }
+  }
+}
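
checkAndLoadDictionaryData above re-validates the dictionary metadata file after taking
the lock on dictionaryInfo, so that only one thread reloads a modified dictionary. A
minimal, generic sketch of that check-lock-recheck pattern, with hypothetical names
rather than CarbonData APIs, is:

public class DoubleCheckedReloadSketch {

  private final Object lock = new Object();
  private volatile long loadedVersion = -1L;

  // Reloads only when the observed version is newer than what is cached,
  // re-checking inside the lock so concurrent callers do not reload twice.
  public void reloadIfModified(long currentVersion) {
    if (currentVersion > loadedVersion) {        // first, cheap check without the lock
      synchronized (lock) {
        if (currentVersion > loadedVersion) {    // second check under the lock
          // ... the expensive reload of the cached data would happen here ...
          loadedVersion = currentVersion;
        }
      }
    }
  }
}

The first check avoids lock contention on the common path where nothing has changed; the
second check inside the synchronized block prevents duplicate reloads when several
threads pass the first check at the same time.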

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cd6a4ff3/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
new file mode 100644
index 0000000..08d9bef
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
@@ -0,0 +1,283 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache.dictionary;
+
+import java.nio.charset.Charset;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.carbondata.core.carbon.metadata.datatype.DataType;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.util.ByteUtil;
+import org.apache.carbondata.core.util.CarbonProperties;
+
+/**
+ * class that implements methods specific to dictionary data lookup
+ */
+public class ColumnDictionaryInfo extends AbstractColumnDictionaryInfo {
+
+  /**
+   * index after members are sorted
+   */
+  private AtomicReference<List<Integer>> sortOrderReference =
+      new AtomicReference<List<Integer>>(new ArrayList<Integer>());
+
+  /**
+   * inverted index to retrieve the member
+   */
+  private AtomicReference<List<Integer>> sortReverseOrderReference =
+      new AtomicReference<List<Integer>>(new ArrayList<Integer>());
+
+  private DataType dataType;
+
+  public ColumnDictionaryInfo(DataType dataType) {
+    this.dataType = dataType;
+  }
+
+  /**
+   * This method will find and return the surrogate key for a given dictionary value
+   * Applicable scenario:
+   * 1. Incremental data load : Dictionary will not be generated for existing values. For
+   * that, values have to be looked up in the existing dictionary cache.
+   * 2. Filter scenarios where from value surrogate key has to be found.
+   *
+   * @param value dictionary value as byte array
+   * @return if found returns key else 0
+   */
+  @Override public int getSurrogateKey(byte[] value) {
+    return getSurrogateKeyFromDictionaryValue(value);
+  }
+
+  /**
+   * This method will find and return the sort index for a given dictionary id.
+   * Applicable scenarios:
+   * 1. Used in case of order by queries when data sorting is required
+   *
+   * @param surrogateKey a unique ID for a dictionary value
+   * @return if found returns key else 0
+   */
+  @Override public int getSortedIndex(int surrogateKey) {
+    if (surrogateKey > sortReverseOrderReference.get().size()
+        || surrogateKey < MINIMUM_SURROGATE_KEY) {
+      return -1;
+    }
+    // decrement surrogate key as surrogate key basically means the index in array list
+    // because surrogate key starts from 1 and index of list from 0, so it needs to be
+    // decremented by 1
+    return sortReverseOrderReference.get().get(surrogateKey - 1);
+  }
+
+  /**
+   * This method will find and return the dictionary value from sorted index.
+   * Applicable scenarios:
+   * 1. Query final result preparation in case of order by queries:
+   * While convert the final result which will
+   * be surrogate key back to original dictionary values this method will be used
+   *
+   * @param sortedIndex sort index of dictionary value
+   * @return value if found else null
+   */
+  @Override public String getDictionaryValueFromSortedIndex(int sortedIndex) {
+    if (sortedIndex > sortReverseOrderReference.get().size()
+        || sortedIndex < MINIMUM_SURROGATE_KEY) {
+      return null;
+    }
+    // decrement surrogate key as surrogate key basically means the index in array list
+    // because surrogate key starts from 1, sort index will start from 1 and index
+    // of list from 0, so it needs to be decremented by 1
+    int surrogateKey = sortOrderReference.get().get(sortedIndex - 1);
+    return getDictionaryValueForKey(surrogateKey);
+  }
+
+  /**
+   * This method will add a new dictionary chunk to existing list of dictionary chunks
+   *
+   * @param dictionaryChunk
+   */
+  @Override public void addDictionaryChunk(List<byte[]> dictionaryChunk) {
+    dictionaryChunks.add(dictionaryChunk);
+  }
+
+  /**
+   * This method will set the sort order index of a dictionary column.
+   * Sort order index is the index of dictionary values after they are sorted.
+   *
+   * @param sortOrderIndex
+   */
+  @Override public void setSortOrderIndex(List<Integer> sortOrderIndex) {
+    sortOrderReference.set(sortOrderIndex);
+  }
+
+  /**
+   * This method will set the sort reverse index of a dictionary column.
+   * Sort reverse index is the index of dictionary values before they are sorted.
+   *
+   * @param sortReverseOrderIndex
+   */
+  @Override public void setSortReverseOrderIndex(List<Integer> sortReverseOrderIndex) {
+    sortReverseOrderReference.set(sortReverseOrderIndex);
+  }
+
+  /**
+   * This method will apply binary search logic to find the surrogate key for the
+   * given value
+   *
+   * @param key to be searched
+   * @return
+   */
+  private int getSurrogateKeyFromDictionaryValue(byte[] key) {
+    String filterKey = new String(key, Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+    int low = 0;
+    List<Integer> sortedSurrogates = sortOrderReference.get();
+    int high = sortedSurrogates.size() - 1;
+    while (low <= high) {
+      int mid = (low + high) >>> 1;
+      int surrogateKey = sortedSurrogates.get(mid);
+      byte[] dictionaryValue = getDictionaryBytesFromSurrogate(surrogateKey);
+      int cmp = -1;
+      if (this.getDataType() != DataType.STRING) {
+        cmp = compareFilterKeyWithDictionaryKey(new String(dictionaryValue), filterKey,
+            this.getDataType());
+
+      } else {
+        cmp = ByteUtil.UnsafeComparer.INSTANCE.compareTo(dictionaryValue, key);
+      }
+      if (cmp < 0) {
+        low = mid + 1;
+      } else if (cmp > 0) {
+        high = mid - 1;
+      } else {
+        return surrogateKey; // key found
+      }
+    }
+    return 0;
+  }
+
+  /**
+   * This method will apply binary search logic to find the surrogate keys for the
+   * given list of filter member values and add them to the surrogates list
+   *
+   * @param byteValuesOfFilterMembers filter member values to be searched
+   * @param surrogates                list to which the resolved surrogate keys are added
+   */
+  public void getIncrementalSurrogateKeyFromDictionary(List<byte[]> byteValuesOfFilterMembers,
+      List<Integer> surrogates) {
+    List<Integer> sortedSurrogates = sortOrderReference.get();
+    int low = 0;
+    for (byte[] byteValueOfFilterMember : byteValuesOfFilterMembers) {
+      String filterKey = new String(byteValueOfFilterMember,
+          Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
+      if (CarbonCommonConstants.MEMBER_DEFAULT_VAL.equals(filterKey)) {
+        surrogates.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL_SURROGATE_KEY);
+        continue;
+      }
+      int high = sortedSurrogates.size() - 1;
+      while (low <= high) {
+        int mid = (low + high) >>> 1;
+        int surrogateKey = sortedSurrogates.get(mid);
+        byte[] dictionaryValue = getDictionaryBytesFromSurrogate(surrogateKey);
+        int cmp = -1;
+        //fortify fix
+        if (null == dictionaryValue) {
+          cmp = -1;
+        } else if (this.getDataType() != DataType.STRING) {
+          cmp = compareFilterKeyWithDictionaryKey(new String(dictionaryValue), filterKey,
+              this.getDataType());
+
+        } else {
+          cmp =
+              ByteUtil.UnsafeComparer.INSTANCE.compareTo(dictionaryValue, byteValueOfFilterMember);
+        }
+        if (cmp < 0) {
+          low = mid + 1;
+        } else if (cmp > 0) {
+          high = mid - 1;
+        } else {
+
+          surrogates.add(surrogateKey);
+          low = mid;
+          break;
+        }
+      }
+    }
+    //Default value has to be added
+    if (surrogates.isEmpty()) {
+      surrogates.add(0);
+    }
+  }
+
+  private int compareFilterKeyWithDictionaryKey(String dictionaryVal, String memberVal,
+      DataType dataType) {
+    try {
+      switch (dataType) {
+        case SHORT:
+          return Short.compare((Short.parseShort(dictionaryVal)), (Short.parseShort(memberVal)));
+        case INT:
+          return Integer.compare((Integer.parseInt(dictionaryVal)), (Integer.parseInt(memberVal)));
+        case DOUBLE:
+          return Double
+              .compare((Double.parseDouble(dictionaryVal)), (Double.parseDouble(memberVal)));
+        case LONG:
+          return Long.compare((Long.parseLong(dictionaryVal)), (Long.parseLong(memberVal)));
+        case BOOLEAN:
+          return Boolean
+              .compare((Boolean.parseBoolean(dictionaryVal)), (Boolean.parseBoolean(memberVal)));
+        case TIMESTAMP:
+          SimpleDateFormat parser = new SimpleDateFormat(CarbonProperties.getInstance()
+              .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+                  CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
+          Date dateToStr;
+          Date dictionaryDate;
+          dateToStr = parser.parse(memberVal);
+          dictionaryDate = parser.parse(dictionaryVal);
+          return dictionaryDate.compareTo(dateToStr);
+        case DECIMAL:
+          java.math.BigDecimal javaDecValForDictVal = new java.math.BigDecimal(dictionaryVal);
+          java.math.BigDecimal javaDecValForMemberVal = new java.math.BigDecimal(memberVal);
+          return javaDecValForDictVal.compareTo(javaDecValForMemberVal);
+        default:
+          return -1;
+      }
+    } catch (Exception e) {
+      // For all data types except String, the null member is treated as the highest value
+      // while searching the dictionary. When the comparison involves a filter member that is
+      // also a null member, parsing fails for non-string data types, so an explicit string
+      // comparison is required; if both are null members the method has to return 0.
+      if (memberVal.equals(dictionaryVal)) {
+        return 0;
+      }
+      return 1;
+    }
+  }
+
+  /**
+   * returns the data type of this dictionary column
+   *
+   * @return the data type
+   */
+  public DataType getDataType() {
+    return dataType;
+  }
+
+}
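
getSurrogateKeyFromDictionaryValue above binary-searches through the sort order index
rather than the chunk list itself, because surrogate keys are assigned in insertion
order while the sort order index arranges them by value. A compact, standalone
illustration of searching through such an indirection, using plain strings instead of
byte arrays and not taken from CarbonData, is:

import java.util.Arrays;
import java.util.List;

public class SortOrderSearchSketch {

  // values.get(k - 1) is the dictionary value for surrogate key k (insertion order);
  // sortOrder holds the surrogate keys arranged so that their values are ascending.
  static int findSurrogate(List<String> values, List<Integer> sortOrder, String target) {
    int low = 0;
    int high = sortOrder.size() - 1;
    while (low <= high) {
      int mid = (low + high) >>> 1;
      int surrogate = sortOrder.get(mid);
      int cmp = values.get(surrogate - 1).compareTo(target);
      if (cmp < 0) {
        low = mid + 1;
      } else if (cmp > 0) {
        high = mid - 1;
      } else {
        return surrogate; // value found
      }
    }
    return 0; // not found, mirroring the 0 convention used in the diff
  }

  public static void main(String[] args) {
    // insertion order: banana(1), apple(2), cherry(3); value order: apple, banana, cherry
    List<String> values = Arrays.asList("banana", "apple", "cherry");
    List<Integer> sortOrder = Arrays.asList(2, 1, 3);
    System.out.println(findSurrogate(values, sortOrder, "banana")); // prints 1
  }
}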