Posted to commits@carbondata.apache.org by xu...@apache.org on 2018/10/18 01:56:58 UTC

[4/6] carbondata git commit: [CARBONDATA-3024] Refactor to use log4j Logger directly
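
The refactor applied in the hunks below is mechanical and follows one pattern per file. A minimal before/after sketch of that pattern (the class name Foo and method work() are hypothetical placeholders; the factory call, log4j's message-then-throwable argument order in error(Object, Throwable), and the Audit.log(Logger, String) helper are all taken from the hunks in this commit):

    // Before: custom LogService wrapper interface
    //   private static final LogService LOGGER =
    //       LogServiceFactory.getLogService(Foo.class.getName());
    //   LOGGER.error(ex, "something failed");   // throwable first
    //   LOGGER.audit("key added");              // audit on the wrapper

    // After: log4j Logger used directly
    import org.apache.carbondata.common.logging.LogServiceFactory;
    import org.apache.carbondata.common.logging.impl.Audit;
    import org.apache.log4j.Logger;

    public final class Foo {
      private static final Logger LOGGER =
          LogServiceFactory.getLogService(Foo.class.getName());

      void work() {
        try {
          // ... some operation that may fail ...
        } catch (Exception ex) {
          // log4j argument order: message first, throwable second
          LOGGER.error("something failed", ex);
        }
        // audit messages now go through the static Audit helper
        Audit.log(LOGGER, "key added");
      }
    }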

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java b/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java
index 513e786..3d63560 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java
@@ -19,9 +19,10 @@ package org.apache.carbondata.core.util;
 
 import java.lang.reflect.Method;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 
+import org.apache.log4j.Logger;
+
 /**
  * This wrapper class is created so that core doesnt have direct dependency on spark
  * TODO: Need to have carbon implementation if carbon needs to be used without spark
@@ -30,7 +31,7 @@ public final class ObjectSizeCalculator {
   /**
    * Logger object for the class
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(ObjectSizeCalculator.class.getName());
 
   /**
@@ -63,7 +64,7 @@ public final class ObjectSizeCalculator {
     } catch (Throwable ex) {
       // throwable is being caught as external interface is being invoked through reflection
       // and runtime exceptions might get thrown
-      LOGGER.error(ex, "Could not access method SizeEstimator:estimate.Returning default value");
+      LOGGER.error("Could not access method SizeEstimator:estimate.Returning default value", ex);
       methodAccessible = false;
       return defValue;
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
index 931e106..027e6cb 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
@@ -23,8 +23,8 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.carbondata.common.constants.LoggerAction;
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
 import org.apache.carbondata.core.cache.CacheProvider;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.constants.CarbonCommonConstantsInternal;
@@ -56,12 +56,14 @@ import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CAR
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_TIMESTAMPFORMAT;
 import static org.apache.carbondata.core.constants.CarbonV3DataFormatConstants.BLOCKLET_SIZE_IN_MB;
 
+import org.apache.log4j.Logger;
+
 /**
  * This class maintains carbon session params
  */
 public class SessionParams implements Serializable, Cloneable {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(CacheProvider.class.getName());
   private static final long serialVersionUID = -7801994600594915264L;
 
@@ -124,7 +126,8 @@ public class SessionParams implements Serializable, Cloneable {
         value = value.toUpperCase();
       }
       if (doAuditing) {
-        LOGGER.audit("The key " + key + " with value " + value + " added in the session param");
+        Audit.log(LOGGER,
+            "The key " + key + " with value " + value + " added in the session param");
       }
       sProps.put(key, value);
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java b/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java
index 16dacb2..196fd64 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java
@@ -23,17 +23,17 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.log4j.Logger;
 
 /**
  * This class maintains task level metrics info for all spawned child threads and parent task thread
  */
 public class TaskMetricsMap {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(TaskMetricsMap.class.getName());
 
   public static final InheritableThreadLocal<Long> threadLocal = new InheritableThreadLocal<>();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java b/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java
index eef2507..833ed8b 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java
@@ -20,7 +20,6 @@ package org.apache.carbondata.core.util.path;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -31,6 +30,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.viewfs.ViewFileSystem;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
+import org.apache.log4j.Logger;
 
 /**
  * Implementation for HDFS utility methods
@@ -47,7 +47,7 @@ public class HDFSLeaseUtils {
   /**
    * LOGGER
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(HDFSLeaseUtils.class.getName());
 
   /**
@@ -128,8 +128,8 @@ public class HDFSLeaseUtils {
                     + retryInterval + " ms...");
             Thread.sleep(retryInterval);
           } catch (InterruptedException e) {
-            LOGGER.error(e,
-                "Interrupted exception occurred while recovering lease for file : " + filePath);
+            LOGGER.error(
+                "Interrupted exception occurred while recovering lease for file : " + filePath, e);
           }
         }
       } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java
index 4cf3827..8e97705 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java
@@ -22,13 +22,13 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.mutate.DeleteDeltaBlockDetails;
 
 import com.google.gson.Gson;
+import org.apache.log4j.Logger;
 
 /**
  * This class is responsible for writing the delete delta file
@@ -38,7 +38,7 @@ public class CarbonDeleteDeltaWriterImpl implements CarbonDeleteDeltaWriter {
   /**
    * LOGGER
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(CarbonDeleteDeltaWriterImpl.class.getName());
 
   private String filePath;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
index 53411e9..7113771 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
@@ -23,7 +23,6 @@ import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -37,6 +36,7 @@ import org.apache.carbondata.core.util.path.HDFSLeaseUtils;
 import org.apache.carbondata.format.ColumnDictionaryChunk;
 import org.apache.carbondata.format.ColumnDictionaryChunkMeta;
 
+import org.apache.log4j.Logger;
 import org.apache.thrift.TBase;
 
 /**
@@ -47,7 +47,7 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
   /**
    * LOGGER
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(CarbonDictionaryWriterImpl.class.getName());
 
   /**
@@ -352,7 +352,7 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
       // Cases to handle
       // 1. Handle File lease recovery
       if (HDFSLeaseUtils.checkExceptionMessageForLeaseRecovery(e.getMessage())) {
-        LOGGER.error(e, "Lease recovery exception encountered for file: " + dictionaryFile);
+        LOGGER.error("Lease recovery exception encountered for file: " + dictionaryFile, e);
         boolean leaseRecovered = HDFSLeaseUtils.recoverFileLease(dictionaryFile);
         if (leaseRecovered) {
           // try to open output stream again after recovering the lease on file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java b/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java
index f25081d..8524c83 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java
@@ -21,7 +21,6 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -33,6 +32,8 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.core.writer.ThriftWriter;
 import org.apache.carbondata.format.ColumnSortInfo;
 
+import org.apache.log4j.Logger;
+
 /**
  * The class responsible for writing the dictionary/column sort index and sort index inverted data
  * in the thrift format
@@ -61,7 +62,7 @@ public class CarbonDictionarySortIndexWriterImpl implements CarbonDictionarySort
   /**
    * Comment for <code>LOGGER</code>
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(CarbonDictionarySortIndexWriterImpl.class.getName());
 
   /**
@@ -119,8 +120,8 @@ public class CarbonDictionarySortIndexWriterImpl implements CarbonDictionarySort
         this.sortIndexThriftWriter.open();
         sortIndexThriftWriter.write(columnSortInfo);
       } catch (IOException ie) {
-        LOGGER.error(ie,
-            "problem while writing the dictionary sort index file.");
+        LOGGER.error(
+            "problem while writing the dictionary sort index file.", ie);
         throw new IOException("problem while writing the dictionary sort index file.", ie);
       } finally {
         if (null != sortIndexThriftWriter) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java
index 42d4afa..daebd9f 100644
--- a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java
@@ -17,11 +17,15 @@
 
 package org.apache.carbondata.core.datastore.filesystem;
 
-import mockit.Mock;
-import mockit.MockUp;
-import org.apache.carbondata.common.logging.LogService;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+
 import org.apache.carbondata.common.logging.LogServiceFactory;
 
+import mockit.Mock;
+import mockit.MockUp;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -30,15 +34,11 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.util.Progressable;
+import org.apache.log4j.Logger;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.BufferedWriter;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
@@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue;
 
 public class HDFSCarbonFileTest {
 
-    private static final LogService LOGGER =
+    private static final Logger LOGGER =
             LogServiceFactory.getLogService(HDFSCarbonFileTest.class.getName());
     private static HDFSCarbonFile hdfsCarbonFile;
     private static FileStatus fileStatus = null;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java b/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java
index 50fdcba..3032016 100644
--- a/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java
@@ -17,23 +17,24 @@
 
 package org.apache.carbondata.core.load;
 
-import org.apache.carbondata.common.logging.LogService;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
 
+import org.apache.log4j.Logger;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-
-import static junit.framework.Assert.*;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotSame;
 
 public class LoadMetadataDetailsUnitTest {
 
   private LoadMetadataDetails loadMetadataDetails;
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(LoadMetadataDetailsUnitTest.class.getName());
 
   @Before public void setup() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
index 4734abd..ffb781a 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datamap.dev.DataMapWriter;
@@ -43,8 +41,6 @@ import org.apache.hadoop.util.hash.Hash;
 
 @InterfaceAudience.Internal
 public abstract class AbstractBloomDataMapWriter extends DataMapWriter {
-  private static final LogService LOG = LogServiceFactory.getLogService(
-      BloomDataMapWriter.class.getCanonicalName());
   private int bloomFilterSize;
   private double bloomFilterFpp;
   private boolean compressBloom;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
index a5376be..4ec215e 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
@@ -21,11 +21,18 @@ import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -63,6 +70,7 @@ import org.apache.carbondata.processing.loading.converter.impl.FieldEncoderFacto
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.bloom.CarbonBloomFilter;
 import org.apache.hadoop.util.bloom.Key;
+import org.apache.log4j.Logger;
 
 /**
  * BloomDataCoarseGrainMap is constructed in blocklet level. For each indexed column,
@@ -71,7 +79,7 @@ import org.apache.hadoop.util.bloom.Key;
  */
 @InterfaceAudience.Internal
 public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(BloomCoarseGrainDataMap.class.getName());
   private Map<String, CarbonColumn> name2Col;
   private Cache<BloomCacheKeyValue.CacheKey, BloomCacheKeyValue.CacheValue> cache;
@@ -136,7 +144,7 @@ public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
         this.name2Converters.put(indexedColumn.get(i).getColName(), fieldConverter);
       }
     } catch (IOException e) {
-      LOGGER.error(e, "Exception occurs while init index columns");
+      LOGGER.error("Exception occurs while init index columns", e);
       throw new RuntimeException(e);
     }
     this.badRecordLogHolder = new BadRecordLogHolder();
@@ -172,7 +180,7 @@ public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
     try {
       bloomQueryModels = createQueryModel(filterExp.getFilterExpression());
     } catch (DictionaryGenerationException | UnsupportedEncodingException e) {
-      LOGGER.error(e, "Exception occurs while creating query model");
+      LOGGER.error("Exception occurs while creating query model", e);
       throw new RuntimeException(e);
     }
     for (BloomQueryModel bloomQueryModel : bloomQueryModels) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
index 8974918..4064d53 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
@@ -18,12 +18,16 @@ package org.apache.carbondata.datamap.bloom;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
@@ -51,13 +55,14 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.events.Event;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
 
 /**
  * This class is for Bloom Filter for blocklet level
  */
 @InterfaceAudience.Internal
 public class BloomCoarseGrainDataMapFactory extends DataMapFactory<CoarseGrainDataMap> {
-  private static final LogService LOGGER = LogServiceFactory.getLogService(
+  private static final Logger LOGGER = LogServiceFactory.getLogService(
       BloomCoarseGrainDataMapFactory.class.getName());
   /**
    * property for size of bloom filter

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
index 4063c2e..34abd80 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
@@ -21,8 +21,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CarbonLRUCache;
 import org.apache.carbondata.core.memory.MemoryException;
@@ -38,8 +36,6 @@ import org.apache.hadoop.util.bloom.CarbonBloomFilter;
 @InterfaceAudience.Internal
 public class BloomDataMapCache
     implements Cache<BloomCacheKeyValue.CacheKey, BloomCacheKeyValue.CacheValue> {
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(BloomDataMapCache.class.getName());
 
   /**
    * CarbonLRU cache

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
index 2abdc3f..17813ba 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
@@ -16,13 +16,16 @@
  */
 package org.apache.carbondata.datamap.bloom;
 
-import java.io.*;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
@@ -31,6 +34,7 @@ import org.apache.carbondata.core.util.CarbonUtil;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.bloom.CarbonBloomFilter;
+import org.apache.log4j.Logger;
 
 /**
  * This class works for merging and loading bloom index
@@ -38,7 +42,7 @@ import org.apache.hadoop.util.bloom.CarbonBloomFilter;
 @InterfaceAudience.Internal
 public class BloomIndexFileStore {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
           LogServiceFactory.getLogService(BloomIndexFileStore.class.getName());
 
   // suffix of original generated file
@@ -83,7 +87,7 @@ public class BloomIndexFileStore {
         throw new RuntimeException("Failed to create directory " + mergeShardPath);
       }
     } catch (IOException e) {
-      LOGGER.error(e, "Error occurs while create directory " + mergeShardPath);
+      LOGGER.error("Error occurs while create directory " + mergeShardPath, e);
       throw new RuntimeException("Error occurs while create directory " + mergeShardPath);
     }
 
@@ -110,7 +114,7 @@ public class BloomIndexFileStore {
           CarbonUtil.closeStream(dataInputStream);
         }
       } catch (IOException e) {
-        LOGGER.error(e, "Error occurs while merge bloom index file of column: " + indexCol);
+        LOGGER.error("Error occurs while merge bloom index file of column: " + indexCol, e);
         // delete merge shard of bloom index for this segment when failed
         FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(mergeShardPath));
         throw new RuntimeException(
@@ -123,7 +127,7 @@ public class BloomIndexFileStore {
     try {
       FileFactory.deleteFile(mergeInprogressFile, FileFactory.getFileType(mergeInprogressFile));
     } catch (IOException e) {
-      LOGGER.error(e, "Error occurs while deleting file " + mergeInprogressFile);
+      LOGGER.error("Error occurs while deleting file " + mergeInprogressFile, e);
       throw new RuntimeException("Error occurs while deleting file " + mergeInprogressFile);
     }
     // remove old store
@@ -164,7 +168,7 @@ public class BloomIndexFileStore {
 
       return bloomFilters;
     } catch (IOException e) {
-      LOGGER.error(e, "Error occurs while reading bloom index");
+      LOGGER.error("Error occurs while reading bloom index", e);
       throw new RuntimeException("Error occurs while reading bloom index", e);
     } finally {
       CarbonUtil.closeStreams(dataInStream);
@@ -207,7 +211,7 @@ public class BloomIndexFileStore {
           String.format("Read %d bloom indices from %s", bloomFilters.size(), mergeIndexFile));
       return bloomFilters;
     } catch (IOException e) {
-      LOGGER.error(e, "Error occurs while reading merge bloom index");
+      LOGGER.error("Error occurs while reading merge bloom index", e);
       throw new RuntimeException("Error occurs while reading merge bloom index", e);
     } finally {
       CarbonUtil.closeStreams(mergeIndexInStream);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
----------------------------------------------------------------------
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
index 0993218..a9155d9 100644
--- a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
+++ b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
@@ -27,7 +27,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datamap.dev.DataMapWriter;
@@ -48,7 +48,7 @@ import org.apache.hadoop.fs.Path;
 
 public class MinMaxDataWriter extends DataMapWriter {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(TableInfo.class.getName());
 
   private Object[] pageLevelMin, pageLevelMax;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
index 40dc975..510d87c 100644
--- a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
+++ b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
@@ -26,7 +26,7 @@ import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.List;
 
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.dev.DataMapModel;
 import org.apache.carbondata.core.datamap.dev.cgdatamap.CoarseGrainDataMap;
@@ -53,7 +53,7 @@ import org.apache.hadoop.fs.PathFilter;
  */
 public class MinMaxIndexDataMap extends CoarseGrainDataMap {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(MinMaxIndexDataMap.class.getName());
 
   private String[] indexFilePath;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
----------------------------------------------------------------------
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
index 7f54a0e..f113508 100644
--- a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
+++ b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
@@ -21,7 +21,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapMeta;
@@ -49,7 +49,7 @@ import org.apache.hadoop.conf.Configuration;
  * Min Max DataMap Factory
  */
 public class MinMaxIndexDataMapFactory extends CoarseGrainDataMapFactory {
-  private static final LogService LOGGER = LogServiceFactory.getLogService(
+  private static final Logger LOGGER = LogServiceFactory.getLogService(
       MinMaxIndexDataMapFactory.class.getName());
   private DataMapMeta dataMapMeta;
   private String dataMapName;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
index 7081fa4..7dcd307 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
@@ -24,7 +24,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datamap.Segment;
@@ -40,6 +39,7 @@ import static org.apache.carbondata.datamap.lucene.LuceneDataMapWriter.flushCach
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
@@ -52,7 +52,7 @@ import org.roaringbitmap.RoaringBitmap;
 
 public class LuceneDataMapBuilder implements DataMapBuilder {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(LuceneDataMapWriter.class.getName());
 
   private String dataMapPath;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
index 3179584..68c3bcc 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
@@ -25,7 +25,6 @@ import java.util.Objects;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
@@ -53,6 +52,7 @@ import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.events.Event;
 
+import org.apache.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 
@@ -88,7 +88,7 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
   /**
    * Logger
    */
-  final LogService LOGGER = LogServiceFactory.getLogService(this.getClass().getName());
+  final Logger LOGGER = LogServiceFactory.getLogService(this.getClass().getName());
 
   /**
    * table's index columns
@@ -281,7 +281,7 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
     try {
       deleteDatamap();
     } catch (MalformedDataMapCommandException ex) {
-      LOGGER.error(ex, "failed to delete datamap directory ");
+      LOGGER.error("failed to delete datamap directory ", ex);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
index bdb17ed..9fd9409 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
@@ -27,7 +27,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datamap.Segment;
@@ -42,6 +41,7 @@ import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
@@ -73,7 +73,7 @@ public class LuceneDataMapWriter extends DataMapWriter {
   /**
    * logger
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(LuceneDataMapWriter.class.getName());
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
index 63f8d7a..048d41a 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
@@ -25,7 +25,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.dev.DataMapModel;
 import org.apache.carbondata.core.datamap.dev.fgdatamap.FineGrainBlocklet;
@@ -41,6 +40,7 @@ import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
 import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
@@ -64,7 +64,7 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
   /**
    * log information
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(LuceneFineGrainDataMap.class.getName());
 
   /**
@@ -436,7 +436,7 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
           }
         }
       } catch (IOException e) {
-        LOGGER.error(e, "Ignoring the exception, Error while closing the lucene index reader");
+        LOGGER.error("Ignoring the exception, Error while closing the lucene index reader", e);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
----------------------------------------------------------------------
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
index 9e0f8e5..5dc6b27 100644
--- a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
+++ b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
@@ -27,6 +27,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 
 import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
 import org.apache.carbondata.core.datamap.DataMapStoreManager
 import org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider
 import org.apache.carbondata.core.metadata.schema.table.DataMapSchema
@@ -71,7 +72,7 @@ class MVAnalyzerRule(sparkSession: SparkSession) extends Rule[LogicalPlan] {
       val modularPlan = catalog.mvSession.sessionState.rewritePlan(plan).withMVTable
       if (modularPlan.find (_.rewritten).isDefined) {
         val compactSQL = modularPlan.asCompactSQL
-        LOGGER.audit(s"\n$compactSQL\n")
+        Audit.log(LOGGER, s"\n$compactSQL\n")
         val analyzed = sparkSession.sql(compactSQL).queryExecution.analyzed
         analyzed
       } else {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
index bc0e280..245d3e8 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
@@ -17,7 +17,6 @@
 
 package org.apache.carbondata.examples.sdk;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
@@ -25,17 +24,22 @@ import org.apache.carbondata.core.scan.expression.ColumnExpression;
 import org.apache.carbondata.core.scan.expression.LiteralExpression;
 import org.apache.carbondata.core.scan.expression.conditional.EqualToExpression;
 import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.sdk.file.*;
+import org.apache.carbondata.sdk.file.CarbonReader;
+import org.apache.carbondata.sdk.file.CarbonWriter;
+import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
+import org.apache.carbondata.sdk.file.Field;
+import org.apache.carbondata.sdk.file.Schema;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3a.Constants;
+import org.apache.log4j.Logger;
 
 /**
  * Example for testing CarbonWriter on S3
  */
 public class SDKS3Example {
     public static void main(String[] args) throws Exception {
-        LogService logger = LogServiceFactory.getLogService(SDKS3Example.class.getName());
+        Logger logger = LogServiceFactory.getLogService(SDKS3Example.class.getName());
         if (args == null || args.length < 3) {
             logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>"
                 + "<s3-endpoint> [table-path-on-s3] [rows]");

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
index 1fac673..2462d8d 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
@@ -17,15 +17,15 @@
 
 package org.apache.carbondata.examples.sdk;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.scan.expression.ColumnExpression;
 import org.apache.carbondata.core.scan.expression.LiteralExpression;
 import org.apache.carbondata.core.scan.expression.conditional.EqualToExpression;
-import org.apache.carbondata.sdk.file.*;
+import org.apache.carbondata.sdk.file.CarbonReader;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
 
 import static org.apache.hadoop.fs.s3a.Constants.ACCESS_KEY;
 import static org.apache.hadoop.fs.s3a.Constants.ENDPOINT;
@@ -36,7 +36,7 @@ import static org.apache.hadoop.fs.s3a.Constants.SECRET_KEY;
  */
 public class SDKS3ReadExample {
     public static void main(String[] args) throws Exception {
-        LogService logger = LogServiceFactory.getLogService(SDKS3ReadExample.class.getName());
+        Logger logger = LogServiceFactory.getLogService(SDKS3ReadExample.class.getName());
         if (args == null || args.length < 3) {
             logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>"
                 + "<s3-endpoint> [table-path-on-s3]");

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
index eb9ff7c..cda8b7a 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
@@ -23,7 +23,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datamap.Segment;
@@ -54,6 +53,7 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
+import org.apache.log4j.Logger;
 
 /**
  * Outputcommitter which manages the segments during loading.It commits segment information to the
@@ -61,7 +61,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
  */
 public class CarbonOutputCommitter extends FileOutputCommitter {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(CarbonOutputCommitter.class.getName());
 
   private ICarbonLock segmentLock;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
index 5525941..7fd9235 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
@@ -34,7 +34,6 @@ import java.util.Set;
 import java.util.UUID;
 
 import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
@@ -93,6 +92,7 @@ import org.apache.hadoop.mapred.TaskAttemptID;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.log4j.Logger;
 
 /**
  * This class will create store file based on provided schema
@@ -100,7 +100,7 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
  */
 public class StoreCreator {
 
-  private static LogService LOG =
+  private static final Logger LOG =
       LogServiceFactory.getLogService(StoreCreator.class.getCanonicalName());
   private AbsoluteTableIdentifier absoluteTableIdentifier;
   private String storePath = null;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java
index 7641427..ccc0594 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java
@@ -22,7 +22,6 @@ import java.text.SimpleDateFormat;
 import java.util.List;
 import java.util.Locale;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.constants.CarbonCommonConstantsInternal;
@@ -45,6 +44,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.log4j.Logger;
 
 /**
  * Utility class
@@ -54,7 +54,7 @@ public class CarbonInputFormatUtil {
   /**
    * Attribute for Carbon LOGGER.
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(CarbonProperties.class.getName());
 
   public static <V> CarbonTableInputFormat<V> createCarbonInputFormat(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
index d4cf480..9382922 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
@@ -20,7 +20,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.exception.InvalidConfigurationException;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
@@ -47,12 +46,14 @@ import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.log4j.Logger;
 
 public class MapredCarbonInputFormat extends CarbonTableInputFormat<ArrayWritable>
     implements InputFormat<Void, ArrayWritable>, CombineHiveInputFormat.AvoidSplitCombination {
   private static final String CARBON_TABLE = "mapreduce.input.carboninputformat.table";
 
-  private LogService LOGGER = LogServiceFactory.getLogService(this.getClass().getCanonicalName());
+  private static final Logger LOGGER =
+      LogServiceFactory.getLogService(MapredCarbonInputFormat.class.getCanonicalName());
 
   /**
    * this method will read the schema from the physical file and populate into CARBON_TABLE

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java
index 4f5bb58..51677de 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java
@@ -19,10 +19,8 @@ package org.apache.carbondata.presto;
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Objects;
 
-import static java.util.Objects.requireNonNull;
-
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.presto.readers.PrestoVectorBlockBuilder;
 import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
@@ -35,6 +33,7 @@ import com.facebook.presto.spi.PrestoException;
 import com.facebook.presto.spi.block.Block;
 import com.facebook.presto.spi.block.LazyBlock;
 import com.facebook.presto.spi.block.LazyBlockLoader;
+import org.apache.log4j.Logger;
 
 import static com.google.common.base.Preconditions.checkState;
 
@@ -43,7 +42,7 @@ import static com.google.common.base.Preconditions.checkState;
  */
 class CarbondataPageSource implements ConnectorPageSource {
 
-  private static final LogService logger =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(CarbondataPageSource.class.getName());
   private List<ColumnHandle> columnHandles;
   private boolean closed;
@@ -132,12 +131,12 @@ class CarbondataPageSource implements ConnectorPageSource {
   }
 
   private void closeWithSuppression(Throwable throwable) {
-    requireNonNull(throwable, "throwable is null");
+    Objects.requireNonNull(throwable, "throwable is null");
     try {
       close();
     } catch (RuntimeException e) {
       // Self-suppression not permitted
-      logger.error(e, e.getMessage());
+      LOGGER.error(e.getMessage(), e);
       if (throwable != e) {
         throwable.addSuppressed(e);
       }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
index 5a1e140..6ddee42 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
@@ -32,9 +32,6 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import static java.util.Objects.requireNonNull;
-
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datamap.DataMapStoreManager;
@@ -79,6 +76,7 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.log4j.Logger;
 import org.apache.thrift.TBase;
 
 import static org.apache.hadoop.fs.s3a.Constants.ACCESS_KEY;
@@ -127,7 +125,7 @@ public class CarbonTableReader {
   /**
    * Logger instance
    */
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(CarbonTableReader.class.getName());
 
   /**
@@ -136,7 +134,7 @@ public class CarbonTableReader {
   private List<String> schemaNames = new ArrayList<>();
 
   @Inject public CarbonTableReader(CarbonTableConfig config) {
-    this.config = requireNonNull(config, "CarbonTableConfig is null");
+    this.config = Objects.requireNonNull(config, "CarbonTableConfig is null");
     this.carbonCache = new AtomicReference(new HashMap());
     tableList = new ConcurrentSet<>();
     setS3Properties();
@@ -236,7 +234,7 @@ public class CarbonTableReader {
    * @return
    */
   public Set<String> getTableNames(String schema) {
-    requireNonNull(schema, "schema is null");
+    Objects.requireNonNull(schema, "schema is null");
     return updateTableList(schema);
   }
 
@@ -270,7 +268,7 @@ public class CarbonTableReader {
       throw new RuntimeException(e);
     }
 
-    requireNonNull(schemaTableName, "schemaTableName is null");
+    Objects.requireNonNull(schemaTableName, "schemaTableName is null");
     return loadTableMetadata(schemaTableName);
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
index 21de2ae..d56b465 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
@@ -49,14 +49,14 @@ class BlockPruneQueryTestCase extends QueryTest with BeforeAndAfterAll {
       }
     } catch {
       case ex: Exception =>
-        LOGGER.error(ex, "Build test file for block prune failed")
+        LOGGER.error("Build test file for block prune failed", ex)
     } finally {
       if (writer != null) {
         try {
           writer.close()
         } catch {
           case ex: Exception =>
-            LOGGER.error(ex, "Close output stream catching exception")
+            LOGGER.error("Close output stream catching exception", ex)
         }
       }
     }
@@ -102,7 +102,7 @@ class BlockPruneQueryTestCase extends QueryTest with BeforeAndAfterAll {
       }
     } catch {
       case ex: Exception =>
-        LOGGER.error(ex, "Delete temp test data file for block prune catching exception")
+        LOGGER.error("Delete temp test data file for block prune catching exception", ex)
     }
     sql("DROP TABLE IF EXISTS blockprune")
   }

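The swapped arguments in these test hunks follow directly from the API change: the old LogService took error(Throwable, String), while log4j's Logger takes error(Object message, Throwable t). A minimal sketch of the new call shape (ErrorLoggingExample is a placeholder class, not part of the commit):

    import org.apache.carbondata.common.logging.LogServiceFactory;
    import org.apache.log4j.Logger;

    public class ErrorLoggingExample {
      private static final Logger LOGGER =
          LogServiceFactory.getLogService(ErrorLoggingExample.class.getName());

      void run() {
        try {
          throw new IllegalStateException("boom");
        } catch (Exception ex) {
          // Old LogService API: LOGGER.error(ex, "Operation failed");
          // log4j API: message first, throwable second, so the stack
          // trace is printed along with the message.
          LOGGER.error("Operation failed", ex);
        }
      }
    }
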
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
index 3865f08..91e4219 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
@@ -53,14 +53,14 @@ class CarbonCustomBlockDistributionTest extends QueryTest with BeforeAndAfterAll
       }
     } catch {
       case ex: Exception =>
-        LOGGER.error(ex, "Build test file for block prune failed")
+        LOGGER.error("Build test file for block prune failed", ex)
     } finally {
       if (writer != null) {
         try {
           writer.close()
         } catch {
           case ex: Exception =>
-            LOGGER.error(ex, "Close output stream catching exception")
+            LOGGER.error("Close output stream catching exception", ex)
         }
       }
     }
@@ -107,7 +107,7 @@ class CarbonCustomBlockDistributionTest extends QueryTest with BeforeAndAfterAll
       }
     } catch {
       case ex: Exception =>
-        LOGGER.error(ex, "Delete temp test data file for block prune catching exception")
+        LOGGER.error("Delete temp test data file for block prune catching exception", ex)
     }
     sql("DROP TABLE IF EXISTS blockprune")
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java
index 1c8e2d2..3aa7fbf 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java
@@ -18,14 +18,15 @@ package org.apache.carbondata.spark.dictionary.client;
 
 import java.nio.charset.Charset;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.dictionary.client.DictionaryClient;
 import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
 
 import com.google.common.collect.Lists;
 import io.netty.channel.nio.NioEventLoopGroup;
+import org.apache.log4j.Logger;
 import org.apache.spark.SecurityManager;
 import org.apache.spark.SparkConf;
 import org.apache.spark.network.TransportContext;
@@ -41,7 +42,7 @@ import org.apache.spark.network.util.TransportConf;
  */
 public class SecureDictionaryClient implements DictionaryClient {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(SecureDictionaryClient.class.getName());
 
   private SecureDictionaryClientHandler dictionaryClientHandler =
@@ -59,7 +60,7 @@ public class SecureDictionaryClient implements DictionaryClient {
    */
   @Override public void startClient(String secretKey, String address, int port,
       boolean encryptSecureServer) {
-    LOGGER.audit("Starting client on " + address + " " + port);
+    Audit.log(LOGGER, "Starting client on " + address + " " + port);
     long start = System.currentTimeMillis();
 
     SecurityManager securityMgr;
@@ -91,7 +92,7 @@ public class SecureDictionaryClient implements DictionaryClient {
     try {
       client = clientFactory.createClient(address, port);
     } catch (Exception e) {
-      LOGGER.error(e, "Dictionary Client Failed to bind to port:");
+      LOGGER.error("Dictionary Client Failed to bind to port:", e);
     }
     LOGGER.info(
         "Dictionary client Started, Total time spent : " + (System.currentTimeMillis() - start));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java
index cdf2553..d3f27ed 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java
@@ -20,13 +20,13 @@ import java.nio.ByteBuffer;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufAllocator;
 import io.netty.buffer.Unpooled;
+import org.apache.log4j.Logger;
 import org.apache.spark.network.client.RpcResponseCallback;
 import org.apache.spark.network.client.TransportClient;
 import org.apache.spark.network.server.OneForOneStreamManager;
@@ -38,7 +38,7 @@ import org.apache.spark.network.server.StreamManager;
  */
 public class SecureDictionaryClientHandler extends RpcHandler {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(SecureDictionaryClientHandler.class.getName());
 
   private final BlockingQueue<DictionaryMessage> responseMsgQueue = new LinkedBlockingQueue<>();
@@ -58,7 +58,7 @@ public class SecureDictionaryClientHandler extends RpcHandler {
       key.writeData(buffer);
       resp = client.sendRpcSync(buffer.nioBuffer(), 100000);
     } catch (Exception e) {
-      LOGGER.error(e, "Error while send request to server ");
+      LOGGER.error("Error while send request to server ", e);
     }
     try {
       if (resp == null) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
index 995e520..a029da0 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
@@ -19,8 +19,8 @@ package org.apache.carbondata.spark.dictionary.server;
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
 import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessageType;
@@ -33,6 +33,7 @@ import com.google.common.collect.Lists;
 import io.netty.channel.EventLoopGroup;
 import io.netty.channel.nio.NioEventLoopGroup;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
 import org.apache.spark.SecurityManager;
 import org.apache.spark.SparkConf;
 import org.apache.spark.network.TransportContext;
@@ -47,7 +48,7 @@ import scala.Some;
  */
 public class SecureDictionaryServer extends AbstractDictionaryServer implements DictionaryServer  {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(SecureDictionaryServer.class.getName());
 
   private SecureDictionaryServerHandler secureDictionaryServerHandler;
@@ -73,10 +74,8 @@ public class SecureDictionaryServer extends AbstractDictionaryServer implements
           return null;
         }
       });
-    } catch (IOException io) {
-      LOGGER.error(io, "Failed to start Dictionary Server in secure mode");
-    } catch (InterruptedException ie) {
-      LOGGER.error(ie, "Failed to start Dictionary Server in secure mode");
+    } catch (IOException | InterruptedException io) {
+      LOGGER.error("Failed to start Dictionary Server in secure mode", io);
     }
   }
 
@@ -146,13 +145,14 @@ public class SecureDictionaryServer extends AbstractDictionaryServer implements
         //iteratively listening to newports
         context
             .createServer(host, newPort, Lists.<TransportServerBootstrap>newArrayList(bootstrap));
-        LOGGER.audit("Dictionary Server started, Time spent " + (System.currentTimeMillis() - start)
+        Audit.log(LOGGER,
+            "Dictionary Server started, Time spent " + (System.currentTimeMillis() - start)
             + " Listening on port " + newPort);
         this.port = newPort;
         this.host = host;
         break;
       } catch (Exception e) {
-        LOGGER.error(e, "Dictionary Server Failed to bind to port: " + newPort);
+        LOGGER.error("Dictionary Server Failed to bind to port: " + newPort, e);
         if (i == 9) {
           throw new RuntimeException("Dictionary Server Could not bind to any port");
         }
@@ -209,14 +209,12 @@ public class SecureDictionaryServer extends AbstractDictionaryServer implements
           return null;
         }
       });
-    } catch (IOException io) {
-      LOGGER.error(io, "Failed to stop Dictionary Server in secure mode");
-    } catch (InterruptedException ie) {
-      LOGGER.error(ie, "Failed to stop Dictionary Server in secure mode");
+    } catch (IOException | InterruptedException e) {
+      LOGGER.error("Failed to stop Dictionary Server in secure mode", e);
     }
   }
 
-  public void initializeDictionaryGenerator(CarbonTable carbonTable) throws Exception {
+  public void initializeDictionaryGenerator(CarbonTable carbonTable) {
     secureDictionaryServerHandler.initializeTable(carbonTable);
   }
 
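The two previously identical catch blocks collapse into a Java 7 multi-catch, so the start and stop paths each log the failure exactly once. A minimal sketch of the construct (MultiCatchExample and mayThrow are placeholders, not part of the commit):

    import java.io.IOException;

    public class MultiCatchExample {
      void start() {
        try {
          mayThrow();
        } catch (IOException | InterruptedException e) {
          // One handler for both checked types; e is implicitly final and
          // its static type is the common supertype of the alternatives.
          System.err.println("Failed to start Dictionary Server: " + e.getMessage());
        }
      }

      private void mayThrow() throws IOException, InterruptedException {
      }
    }
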

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java
index aaa4cf0..9e291a4 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java
@@ -18,7 +18,6 @@ package org.apache.carbondata.spark.dictionary.server;
 
 import java.nio.ByteBuffer;
 
-import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.dictionary.generator.ServerDictionaryGenerator;
 import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
@@ -28,6 +27,7 @@ import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufAllocator;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.ChannelHandler;
+import org.apache.log4j.Logger;
 import org.apache.spark.network.client.RpcResponseCallback;
 import org.apache.spark.network.client.TransportClient;
 import org.apache.spark.network.server.OneForOneStreamManager;
@@ -39,7 +39,7 @@ import org.apache.spark.network.server.StreamManager;
  */
 @ChannelHandler.Sharable public class SecureDictionaryServerHandler extends RpcHandler {
 
-  private static final LogService LOGGER =
+  private static final Logger LOGGER =
       LogServiceFactory.getLogService(SecureDictionaryServerHandler.class.getName());
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
index 3864b5d..df173cd 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
@@ -29,6 +29,7 @@ import org.apache.spark.unsafe.types.UTF8String
 import org.apache.carbondata.common.Strings
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
 import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile
 import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -158,7 +159,7 @@ object CarbonStore {
       carbonTable: CarbonTable,
       forceTableClean: Boolean,
       currentTablePartitions: Option[Seq[PartitionSpec]] = None): Unit = {
-    LOGGER.audit(s"The clean files request has been received for $dbName.$tableName")
+    Audit.log(LOGGER, s"The clean files request has been received for $dbName.$tableName")
     var carbonCleanFilesLock: ICarbonLock = null
     val absoluteTableIdentifier = if (forceTableClean) {
       AbsoluteTableIdentifier.from(tablePath, dbName, tableName, tableName)
@@ -202,7 +203,7 @@ object CarbonStore {
         CarbonLockUtil.fileUnlock(carbonCleanFilesLock, LockUsage.CLEAN_FILES_LOCK)
       }
     }
-    LOGGER.audit(s"Clean files operation is success for $dbName.$tableName.")
+    Audit.log(LOGGER, s"Clean files operation is success for $dbName.$tableName.")
   }
 
   /**
@@ -281,7 +282,7 @@ object CarbonStore {
       tableName: String,
       carbonTable: CarbonTable): Unit = {
 
-    LOGGER.audit(s"Delete segment by Id request has been received for $dbName.$tableName")
+    Audit.log(LOGGER, s"Delete segment by Id request has been received for $dbName.$tableName")
     validateLoadIds(loadids)
 
     val path = carbonTable.getMetadataPath
@@ -290,7 +291,7 @@ object CarbonStore {
       val invalidLoadIds = SegmentStatusManager.updateDeletionStatus(
         carbonTable.getAbsoluteTableIdentifier, loadids.asJava, path).asScala
       if (invalidLoadIds.isEmpty) {
-        LOGGER.audit(s"Delete segment by Id is successfull for $dbName.$tableName.")
+        Audit.log(LOGGER, s"Delete segment by Id is successfull for $dbName.$tableName.")
       } else {
         sys.error(s"Delete segment by Id is failed. Invalid ID is: ${invalidLoadIds.mkString(",")}")
       }
@@ -307,7 +308,7 @@ object CarbonStore {
       dbName: String,
       tableName: String,
       carbonTable: CarbonTable): Unit = {
-    LOGGER.audit(s"Delete segment by Id request has been received for $dbName.$tableName")
+    Audit.log(LOGGER, s"Delete segment by Id request has been received for $dbName.$tableName")
 
     val time = validateTimeFormat(timestamp)
     val path = carbonTable.getMetadataPath
@@ -320,7 +321,7 @@ object CarbonStore {
           path,
           time).asScala
       if (invalidLoadTimestamps.isEmpty) {
-        LOGGER.audit(s"Delete segment by date is successful for $dbName.$tableName.")
+        Audit.log(LOGGER, s"Delete segment by date is successful for $dbName.$tableName.")
       } else {
         sys.error("Delete segment by date is failed. No matching segment found.")
       }

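Since log4j's Logger has no audit(...) method, audit messages move to the Audit helper while the same Logger instance carries ordinary logging. A minimal sketch, assuming the Audit.log(Logger, String) signature implied by the call sites above (AuditExample is a placeholder class, not part of the commit):

    import org.apache.carbondata.common.logging.LogServiceFactory;
    import org.apache.carbondata.common.logging.impl.Audit;
    import org.apache.log4j.Logger;

    public class AuditExample {
      private static final Logger LOGGER =
          LogServiceFactory.getLogService(AuditExample.class.getName());

      void cleanFiles(String dbName, String tableName) {
        // Audit events route through the Audit helper rather than a
        // dedicated audit(...) method on the old LogService interface.
        Audit.log(LOGGER, "Clean files request received for " + dbName + "." + tableName);
      }
    }
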
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
index f1a12bf..f5c65b3 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
@@ -271,11 +271,11 @@ object DataLoadProcessorStepOnSpark {
       }
     } catch {
       case e: CarbonDataWriterException =>
-        LOGGER.error(e, "Failed for table: " + tableName + " in Data Writer Step")
+        LOGGER.error("Failed for table: " + tableName + " in Data Writer Step", e)
         throw new CarbonDataLoadingException("Error while initializing data handler : " +
           e.getMessage)
       case e: Exception =>
-        LOGGER.error(e, "Failed for table: " + tableName + " in Data Writer Step")
+        LOGGER.error("Failed for table: " + tableName + " in Data Writer Step", e)
         throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage, e)
     } finally {
       if (rowConverter != null) {
@@ -316,11 +316,11 @@ object DataLoadProcessorStepOnSpark {
     e match {
       case e: CarbonDataLoadingException => throw e
       case e: TextParsingException =>
-        LOGGER.error(e, "Data Loading failed for table " + model.getTableName)
+        LOGGER.error("Data Loading failed for table " + model.getTableName, e)
         throw new CarbonDataLoadingException("Data Loading failed for table " + model.getTableName,
           e)
       case e: Exception =>
-        LOGGER.error(e, "Data Loading failed for table " + model.getTableName)
+        LOGGER.error("Data Loading failed for table " + model.getTableName, e)
         throw new CarbonDataLoadingException("Data Loading failed for table " + model.getTableName,
           e)
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala
index ffd20b1..b3eb4f5 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala
@@ -71,7 +71,7 @@ class AlterTableDropColumnRDD[K, V](
         }
       } catch {
         case ex: Exception =>
-          LOGGER.error(ex, ex.getMessage)
+          LOGGER.error(ex.getMessage, ex)
           throw ex
       }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
index 0c30186..d01caee 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
@@ -226,7 +226,7 @@ class CarbonMergerRDD[K, V](
 
       } catch {
         case e: Exception =>
-          LOGGER.error(e, "Compaction Failed ")
+          LOGGER.error("Compaction Failed ", e)
           throw e
       }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
index 6076e4a..ab8bb8b 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
@@ -24,12 +24,11 @@ import java.util.{Date, UUID}
 
 import scala.collection.mutable
 import scala.util.Random
-import scala.util.control.NonFatal
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.mapreduce.{TaskAttemptID, TaskType}
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
-import org.apache.spark.{Partition, SerializableWritable, SparkContext, SparkEnv, TaskContext}
+import org.apache.spark.{Partition, SparkEnv, TaskContext}
 import org.apache.spark.rdd.{DataLoadCoalescedRDD, DataLoadPartitionWrap, RDD}
 import org.apache.spark.serializer.SerializerInstance
 import org.apache.spark.sql.{Row, SparkSession}
@@ -38,13 +37,11 @@ import org.apache.spark.util.SparkUtil
 
 import org.apache.carbondata.common.CarbonIterator
 import org.apache.carbondata.common.logging.LogServiceFactory
-import org.apache.carbondata.common.logging.impl.StandardLogService
 import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.datastore.compression.CompressorFactory
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.metadata.datatype.DataTypes
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatus}
-import org.apache.carbondata.core.util.{CarbonProperties, CarbonTimeStatisticsFactory, ThreadLocalSessionInfo, ThreadLocalTaskInfo}
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonTimeStatisticsFactory, ThreadLocalTaskInfo}
 import org.apache.carbondata.core.util.path.CarbonTablePath
 import org.apache.carbondata.processing.loading.{DataLoadExecutor, FailureCauses, TableProcessingOperations}
 import org.apache.carbondata.processing.loading.csvinput.{BlockDetails, CSVInputFormat, CSVRecordReaderIterator}
@@ -221,9 +218,6 @@ class NewCarbonDataLoadRDD[K, V](
         CarbonQueryUtil.splitFilePath(carbonLoadModel.getFactFilePath, fileList, ",")
         model = carbonLoadModel.getCopyWithPartition(
           carbonLoadModel.getCsvHeader, carbonLoadModel.getCsvDelimiter)
-        StandardLogService.setThreadName(StandardLogService
-          .getPartitionID(model.getCarbonDataLoadSchema.getCarbonTable.getTableUniqueName)
-          , ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId + "")
         val readers =
           split.nodeBlocksDetail.map(format.createRecordReader(_, hadoopAttemptContext))
         readers.zipWithIndex.map { case (reader, index) =>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala
index 6a4577f..6911b0b 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala
@@ -22,7 +22,9 @@ import java.io.IOException
 import org.apache.spark.sql.execution.command.{AlterPartitionModel, DropPartitionCallableModel}
 import org.apache.spark.util.PartitionUtils
 
+import org.apache.carbondata.api.CarbonStore.LOGGER
 import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
 import org.apache.carbondata.core.metadata.schema.partition.PartitionType
 import org.apache.carbondata.spark.{AlterPartitionResultImpl, PartitionFactory}
 
@@ -87,7 +89,7 @@ object PartitionDropper {
             finalDropStatus = dropStatus.forall(_._2)
           }
           if (!finalDropStatus) {
-            logger.audit(s"Drop Partition request failed for table " +
+            Audit.log(logger, s"Drop Partition request failed for table " +
                          s"${ dbName }.${ tableName }")
             logger.error(s"Drop Partition request failed for table " +
                          s"${ dbName }.${ tableName }")
@@ -103,7 +105,7 @@ object PartitionDropper {
             case e: IOException => sys.error(s"Exception while delete original carbon files " +
                                              e.getMessage)
           }
-          logger.audit(s"Drop Partition request completed for table " +
+          Audit.log(logger, s"Drop Partition request completed for table " +
                        s"${ dbName }.${ tableName }")
           logger.info(s"Drop Partition request completed for table " +
                       s"${ dbName }.${ tableName }")
@@ -114,7 +116,7 @@ object PartitionDropper {
     } else {
       PartitionUtils.deleteOriginalCarbonFile(alterPartitionModel, absoluteTableIdentifier,
         Seq(partitionId).toList, dbName, tableName, partitionInfo)
-      logger.audit(s"Drop Partition request completed for table " +
+      Audit.log(logger, s"Drop Partition request completed for table " +
                    s"${ dbName }.${ tableName }")
       logger.info(s"Drop Partition request completed for table " +
                   s"${ dbName }.${ tableName }")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala
index 0d437f6..ca9f049 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala
@@ -22,7 +22,9 @@ import java.io.IOException
 import org.apache.spark.sql.execution.command.{AlterPartitionModel, SplitPartitionCallableModel}
 import org.apache.spark.util.PartitionUtils
 
+import org.apache.carbondata.api.CarbonStore.LOGGER
 import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
 import org.apache.carbondata.spark.{AlterPartitionResultImpl, PartitionFactory}
 
 object PartitionSplitter {
@@ -73,7 +75,7 @@ object PartitionSplitter {
          finalSplitStatus = splitStatus.forall(_._2)
        }
        if (!finalSplitStatus) {
-         logger.audit(s"Add/Split Partition request failed for table " +
+         Audit.log(logger, s"Add/Split Partition request failed for table " +
                       s"${ databaseName }.${ tableName }")
          logger.error(s"Add/Split Partition request failed for table " +
                       s"${ databaseName }.${ tableName }")
@@ -88,7 +90,7 @@ object PartitionSplitter {
          case e: IOException => sys.error(s"Exception while delete original carbon files " +
          e.getMessage)
        }
-       logger.audit(s"Add/Split Partition request completed for table " +
+       Audit.log(logger, s"Add/Split Partition request completed for table " +
                     s"${ databaseName }.${ tableName }")
        logger.info(s"Add/Split Partition request completed for table " +
                    s"${ databaseName }.${ tableName }")