You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by xu...@apache.org on 2018/10/18 01:56:55 UTC
[1/6] carbondata git commit: [CARBONDATA-3024] Refactor to use log4j
Logger directly
Repository: carbondata
Updated Branches:
refs/heads/master 15d38260c -> 06adb5a03
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
index 26ae2d7..694b345 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
@@ -20,7 +20,6 @@ import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
import org.apache.carbondata.core.datastore.row.CarbonRow;
@@ -40,6 +39,8 @@ import org.apache.carbondata.processing.store.CarbonFactHandler;
import org.apache.carbondata.processing.store.CarbonFactHandlerFactory;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* It reads data from batch of sorted files(it could be in-memory/disk based files)
* which are generated in previous sort step. And it writes data to carbondata file.
@@ -47,7 +48,7 @@ import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
*/
public class DataWriterBatchProcessorStepImpl extends AbstractDataLoadProcessorStep {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DataWriterBatchProcessorStepImpl.class.getName());
private Map<String, LocalDictionaryGenerator> localDictionaryGeneratorMap;
@@ -112,7 +113,7 @@ public class DataWriterBatchProcessorStepImpl extends AbstractDataLoadProcessorS
i++;
}
} catch (Exception e) {
- LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterBatchProcessorStepImpl");
+ LOGGER.error("Failed for table: " + tableName + " in DataWriterBatchProcessorStepImpl", e);
if (e.getCause() instanceof BadRecordFoundException) {
throw new BadRecordFoundException(e.getCause().getMessage());
}
@@ -132,7 +133,7 @@ public class DataWriterBatchProcessorStepImpl extends AbstractDataLoadProcessorS
} catch (Exception e) {
// if throw exception from here dataHandler will not be closed.
// so just holding exception and later throwing exception
- LOGGER.error(e, "Failed for table: " + tableName + " in finishing data handler");
+ LOGGER.error("Failed for table: " + tableName + " in finishing data handler", e);
exception = new CarbonDataWriterException(
"Failed for table: " + tableName + " in finishing data handler", e);
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
index cc038b9..3d704c9 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
@@ -29,7 +29,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
import org.apache.carbondata.core.datastore.row.CarbonRow;
@@ -50,13 +49,15 @@ import org.apache.carbondata.processing.store.CarbonFactHandler;
import org.apache.carbondata.processing.store.CarbonFactHandlerFactory;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* It reads data from sorted files which are generated in previous sort step.
* And it writes data to carbondata file. It also generates mdk key while writing to carbondata file
*/
public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DataWriterProcessorStepImpl.class.getName());
private long readCounter;
@@ -137,11 +138,11 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
throw new CarbonDataWriterException(e);
}
} catch (CarbonDataWriterException e) {
- LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterProcessorStepImpl");
+ LOGGER.error("Failed for table: " + tableName + " in DataWriterProcessorStepImpl", e);
throw new CarbonDataLoadingException(
"Error while initializing data handler : " + e.getMessage());
} catch (Exception e) {
- LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterProcessorStepImpl");
+ LOGGER.error("Failed for table: " + tableName + " in DataWriterProcessorStepImpl", e);
throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage(), e);
}
return null;
@@ -208,7 +209,7 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
try {
dataHandler.finish();
} catch (Exception e) {
- LOGGER.error(e, "Failed for table: " + tableName + " in finishing data handler");
+ LOGGER.error("Failed for table: " + tableName + " in finishing data handler", e);
}
LOGGER.info("Record Processed For table: " + tableName);
String logMessage =
@@ -230,10 +231,10 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
try {
dataHandler.closeHandler();
} catch (CarbonDataWriterException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonDataLoadingException(e.getMessage(), e);
} catch (Exception e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage());
}
}
@@ -271,7 +272,7 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
LOGGER.info("closing all the DataMap writers registered to DataMap writer listener");
listener.finish();
} catch (IOException e) {
- LOGGER.error(e, "error while closing the datamap writers");
+ LOGGER.error("error while closing the datamap writers", e);
// ignoring the exception
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java
index 8e68ef3..7bc7ae1 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java
@@ -25,7 +25,6 @@ import java.util.Map;
import java.util.Set;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -51,6 +50,7 @@ import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.DataTypeConverter;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* Executor class for executing the query on the selected segments to be merged.
@@ -58,7 +58,7 @@ import org.apache.hadoop.conf.Configuration;
*/
public class CarbonCompactionExecutor {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonCompactionExecutor.class.getName());
private final Map<String, List<DataFileFooter>> dataFileMetadataSegMapping;
private final SegmentProperties destinationSegProperties;
@@ -208,7 +208,7 @@ public class CarbonCompactionExecutor {
}
logStatistics(queryStartTime);
} catch (QueryExecutionException e) {
- LOGGER.error(e, "Problem while close. Ignoring the exception");
+ LOGGER.error("Problem while close. Ignoring the exception", e);
}
clearDictionaryFromQueryModel();
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionUtil.java b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionUtil.java
index 75a231e..74efdcb 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionUtil.java
@@ -22,7 +22,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
@@ -39,13 +38,14 @@ import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.commons.lang3.ArrayUtils;
+import org.apache.log4j.Logger;
/**
* Utility Class for the Compaction Flow.
*/
public class CarbonCompactionUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonCompactionUtil.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
index 5b001bf..a7fc3f8 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
@@ -21,10 +21,20 @@ import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
@@ -50,12 +60,13 @@ import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
import org.apache.carbondata.processing.util.CarbonLoaderUtil;
import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
/**
* utility class for load merging.
*/
public final class CarbonDataMergerUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonDataMergerUtil.class.getName());
private CarbonDataMergerUtil() {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java b/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java
index 6133016..2a70dbd 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/CompactionResultSortProcessor.java
@@ -20,7 +20,6 @@ import java.io.File;
import java.io.IOException;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
@@ -48,6 +47,8 @@ import org.apache.carbondata.processing.store.CarbonFactHandler;
import org.apache.carbondata.processing.store.CarbonFactHandlerFactory;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* This class will process the query result and convert the data
* into a format compatible for data load
@@ -57,7 +58,7 @@ public class CompactionResultSortProcessor extends AbstractResultProcessor {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CompactionResultSortProcessor.class.getName());
/**
* carbon load model that contains all the required information for load
@@ -381,7 +382,7 @@ public class CompactionResultSortProcessor extends AbstractResultProcessor {
try {
dataHandler.closeHandler();
} catch (CarbonDataWriterException e) {
- LOGGER.error(e, "Error in close data handler");
+ LOGGER.error("Error in close data handler", e);
throw new Exception("Error in close data handler", e);
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java b/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
index 2911c05..1801c1d 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
@@ -22,7 +22,6 @@ import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
@@ -45,6 +44,8 @@ import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
import org.apache.carbondata.processing.store.CarbonFactHandler;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* This is the Merger class responsible for the merging of the segments.
*/
@@ -61,7 +62,7 @@ public class RowResultMergerProcessor extends AbstractResultProcessor {
*/
private AbstractQueue<RawResultIterator> recordHolderHeap;
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(RowResultMergerProcessor.class.getName());
public RowResultMergerProcessor(String databaseName,
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/AbstractCarbonQueryExecutor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/AbstractCarbonQueryExecutor.java b/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/AbstractCarbonQueryExecutor.java
index dd5969f..c03f34e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/AbstractCarbonQueryExecutor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/AbstractCarbonQueryExecutor.java
@@ -22,7 +22,6 @@ import java.util.List;
import java.util.Map;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
@@ -36,10 +35,11 @@ import org.apache.carbondata.core.scan.result.RowBatch;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
public abstract class AbstractCarbonQueryExecutor {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(AbstractCarbonQueryExecutor.class.getName());
protected CarbonTable carbonTable;
protected QueryModel queryModel;
@@ -68,7 +68,7 @@ public abstract class AbstractCarbonQueryExecutor {
try {
queryExecutor.finish();
} catch (QueryExecutionException e) {
- LOGGER.error(e, "Problem while finish: ");
+ LOGGER.error("Problem while finish: ", e);
}
clearDictionaryFromQueryModel();
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/CarbonSplitExecutor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/CarbonSplitExecutor.java b/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/CarbonSplitExecutor.java
index d32757c..1b65a8b 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/CarbonSplitExecutor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/CarbonSplitExecutor.java
@@ -23,7 +23,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
@@ -35,13 +34,14 @@ import org.apache.carbondata.core.scan.result.iterator.PartitionSpliterRawResult
import org.apache.carbondata.core.util.DataTypeConverter;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* Used to read carbon blocks when add/split partition
*/
public class CarbonSplitExecutor extends AbstractCarbonQueryExecutor {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonSplitExecutor.class.getName());
public CarbonSplitExecutor(Map<String, TaskBlockInfo> segmentMapping, CarbonTable carbonTable) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/RowResultProcessor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/RowResultProcessor.java b/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/RowResultProcessor.java
index 00fbc7a..ac70f27 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/RowResultProcessor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/partition/spliter/RowResultProcessor.java
@@ -18,7 +18,6 @@ package org.apache.carbondata.processing.partition.spliter;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
@@ -33,6 +32,8 @@ import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
import org.apache.carbondata.processing.store.CarbonFactHandler;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
public class RowResultProcessor {
private CarbonFactHandler dataHandler;
@@ -40,7 +41,7 @@ public class RowResultProcessor {
private CarbonColumn[] noDicAndComplexColumns;
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(RowResultProcessor.class.getName());
@@ -84,7 +85,7 @@ public class RowResultProcessor {
}
processStatus = true;
} catch (CarbonDataWriterException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
LOGGER.error("Exception in executing RowResultProcessor" + e.getMessage());
processStatus = false;
} finally {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
index 35563d0..4ebc8d9 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
@@ -26,7 +26,6 @@ import java.util.NoSuchElementException;
import java.util.PriorityQueue;
import java.util.concurrent.Callable;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.util.CarbonUtil;
@@ -34,11 +33,13 @@ import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
import org.apache.carbondata.processing.loading.sort.SortStepRowHandler;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
+import org.apache.log4j.Logger;
+
public class IntermediateFileMerger implements Callable<Void> {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(IntermediateFileMerger.class.getName());
/**
@@ -101,7 +102,7 @@ public class IntermediateFileMerger implements Callable<Void> {
LOGGER.info("============================== Intermediate Merge of " + fileConterConst +
" Sort Temp Files Cost Time: " + intermediateMergeCostTime + "(s)");
} catch (Exception e) {
- LOGGER.error(e, "Problem while intermediate merging");
+ LOGGER.error("Problem while intermediate merging", e);
clear();
throwable = e;
} finally {
@@ -110,7 +111,7 @@ public class IntermediateFileMerger implements Callable<Void> {
try {
finish();
} catch (CarbonSortKeyAndGroupByException e) {
- LOGGER.error(e, "Problem while deleting the merge file");
+ LOGGER.error("Problem while deleting the merge file", e);
throwable = e;
}
} else {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
index 5e9c28d..d2add29 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
@@ -33,7 +33,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
@@ -42,11 +41,13 @@ import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
import org.apache.carbondata.processing.loading.sort.SortStepRowHandler;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
+import org.apache.log4j.Logger;
+
public class SingleThreadFinalSortFilesMerger extends CarbonIterator<Object[]> {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SingleThreadFinalSortFilesMerger.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortDataRows.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortDataRows.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortDataRows.java
index 637741c..996b844 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortDataRows.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortDataRows.java
@@ -28,7 +28,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -40,11 +39,13 @@ import org.apache.carbondata.processing.loading.sort.SortStepRowHandler;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
public class SortDataRows {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SortDataRows.class.getName());
/**
* entryCount
@@ -136,7 +137,7 @@ public class SortDataRows {
semaphore.acquire();
dataSorterAndWriterExecutorService.execute(new DataSorterAndWriter(recordHolderListLocal));
} catch (InterruptedException e) {
- LOGGER.error(e, "exception occurred while trying to acquire a semaphore lock: ");
+ LOGGER.error("exception occurred while trying to acquire a semaphore lock: ", e);
throw new CarbonSortKeyAndGroupByException(e);
}
// create the new holder Array
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortIntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortIntermediateFileMerger.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortIntermediateFileMerger.java
index 0e3f6bd..1f4f1e7 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortIntermediateFileMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortIntermediateFileMerger.java
@@ -26,18 +26,19 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonThreadFactory;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
+import org.apache.log4j.Logger;
+
/**
* It does mergesort intermediate files to big file.
*/
public class SortIntermediateFileMerger {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SortIntermediateFileMerger.class.getName());
/**
@@ -118,7 +119,7 @@ public class SortIntermediateFileMerger {
try {
mergerTask.get(i).get();
} catch (InterruptedException | ExecutionException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonSortKeyAndGroupByException(e);
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java
index c4416d5..ecce232 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java
@@ -20,7 +20,6 @@ import java.io.File;
import java.io.Serializable;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
@@ -32,10 +31,11 @@ import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
public class SortParameters implements Serializable {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SortParameters.class.getName());
/**
* tempFileLocation
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortTempFileChunkHolder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortTempFileChunkHolder.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortTempFileChunkHolder.java
index a1ef04e..ef9c3fa 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortTempFileChunkHolder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortTempFileChunkHolder.java
@@ -27,7 +27,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -38,12 +37,14 @@ import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
import org.apache.carbondata.processing.loading.sort.SortStepRowHandler;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
+import org.apache.log4j.Logger;
+
public class SortTempFileChunkHolder implements Comparable<SortTempFileChunkHolder> {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SortTempFileChunkHolder.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
index 44fe704..67c4463 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
@@ -30,7 +30,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants;
@@ -52,6 +51,8 @@ import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.processing.datatypes.GenericDataType;
import org.apache.carbondata.processing.store.writer.CarbonFactDataWriter;
+import org.apache.log4j.Logger;
+
/**
* Fact data handler class to handle the fact data
*/
@@ -60,7 +61,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonFactDataHandlerColumnar.class.getName());
private CarbonFactDataHandlerModel model;
@@ -213,7 +214,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
dataRows = new ArrayList<>(this.pageSize);
this.entryCount = 0;
} catch (InterruptedException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonDataWriterException(e.getMessage(), e);
}
}
@@ -313,7 +314,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
processWriteTaskSubmitList(producerExecutorServiceTaskList);
processingComplete = true;
} catch (InterruptedException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonDataWriterException(e.getMessage(), e);
}
}
@@ -331,7 +332,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
service.shutdown();
service.awaitTermination(1, TimeUnit.DAYS);
} catch (InterruptedException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonDataWriterException(e.getMessage());
}
}
@@ -349,7 +350,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
try {
taskList.get(i).get();
} catch (InterruptedException | ExecutionException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonDataWriterException(e.getMessage(), e);
}
}
@@ -587,7 +588,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
tablePageList.put(tablePage, indexInNodeHolderArray);
return null;
} catch (Throwable throwable) {
- LOGGER.error(throwable, "Error in producer");
+ LOGGER.error("Error in producer", throwable);
consumerExecutorService.shutdownNow();
resetBlockletProcessingCount();
throw new CarbonDataWriterException(throwable.getMessage(), throwable);
@@ -626,7 +627,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
if (!processingComplete || blockletProcessingCount.get() > 0) {
producerExecutorService.shutdownNow();
resetBlockletProcessingCount();
- LOGGER.error(throwable, "Problem while writing the carbon data file");
+ LOGGER.error("Problem while writing the carbon data file", throwable);
throw new CarbonDataWriterException(throwable.getMessage());
}
} finally {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
index 4b42bfc..d086b6d 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
@@ -23,7 +23,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.TableSpec;
@@ -53,6 +52,8 @@ import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
import org.apache.carbondata.processing.loading.sort.SortScopeOptions;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
// This class contains all the data required for processing and writing the carbon data
// TODO: we should try to minimize this class as refactorying loading process
public class CarbonFactDataHandlerModel {
@@ -60,7 +61,7 @@ public class CarbonFactDataHandlerModel {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonFactDataHandlerModel.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
index 82129db..d6e4027 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
@@ -27,7 +27,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.ColumnType;
import org.apache.carbondata.core.datastore.TableSpec;
@@ -56,13 +55,15 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.processing.datatypes.GenericDataType;
+import org.apache.log4j.Logger;
+
/**
* Represent a page data for all columns, we store its data in columnar layout, so that
* all processing apply to TablePage can be done in vectorized fashion.
*/
public class TablePage {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(TablePage.class.getName());
// For all dimension and measure columns, we store the column data directly in the page,
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
index 37d33c2..64084e8 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
@@ -32,7 +32,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
@@ -55,9 +54,11 @@ import org.apache.carbondata.format.IndexHeader;
import org.apache.carbondata.processing.datamap.DataMapWriterListener;
import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
+import org.apache.log4j.Logger;
+
public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(AbstractFactDataWriter.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
index e3cb052..d97d80c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
@@ -21,7 +21,6 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.blocklet.BlockletEncodedColumnPage;
@@ -47,6 +46,8 @@ import static org.apache.carbondata.core.constants.CarbonV3DataFormatConstants.B
import static org.apache.carbondata.core.constants.CarbonV3DataFormatConstants.BLOCKLET_SIZE_IN_MB_DEFAULT_VALUE;
import static org.apache.carbondata.processing.loading.sort.SortScopeOptions.SortScope.NO_SORT;
+import org.apache.log4j.Logger;
+
/**
* Below class will be used to write the data in V3 format
* <Column1 Data ChunkV3><Column1<Page1><Page2><Page3><Page4>>
@@ -56,7 +57,7 @@ import static org.apache.carbondata.processing.loading.sort.SortScopeOptions.Sor
*/
public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonFactDataWriterImplV3.class.getName());
/**
@@ -113,7 +114,7 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
buffer.flip();
currentOffsetInFile += fileChannel.write(buffer);
} catch (IOException e) {
- LOGGER.error(e, "Problem while writing the carbon file");
+ LOGGER.error("Problem while writing the carbon file", e);
throw new CarbonDataWriterException("Problem while writing the carbon file: ", e);
}
}
@@ -207,7 +208,7 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
}
pageId = 0;
} catch (IOException e) {
- LOGGER.error(e, "Problem while writing file");
+ LOGGER.error("Problem while writing file", e);
throw new CarbonDataWriterException("Problem while writing file", e);
} finally {
// clear the data holder
@@ -369,7 +370,7 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
commitCurrentFile(true);
writeIndexFile();
} catch (Exception e) {
- LOGGER.error(e, "Problem while writing the index file");
+ LOGGER.error("Problem while writing the index file", e);
exception = new CarbonDataWriterException("Problem while writing the index file", e);
} finally {
try {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/util/CarbonBadRecordUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonBadRecordUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonBadRecordUtil.java
index ecc6afb..d1f5500 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonBadRecordUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonBadRecordUtil.java
@@ -21,7 +21,6 @@ import java.io.File;
import java.io.IOException;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
@@ -35,12 +34,13 @@ import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
/**
* Common methods used for the bad record handling
*/
public class CarbonBadRecordUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonDataProcessorUtil.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
index 525f7ee..6f36ef8 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
@@ -31,7 +31,6 @@ import java.util.Set;
import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.common.constants.LoggerAction;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.CarbonMetadata;
@@ -58,9 +57,10 @@ import org.apache.carbondata.processing.loading.sort.SortScopeOptions;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
public final class CarbonDataProcessorUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonDataProcessorUtil.class.getName());
private CarbonDataProcessorUtil() {
@@ -77,7 +77,7 @@ public final class CarbonDataProcessorUtil {
try {
CarbonUtil.deleteFoldersAndFiles(file);
} catch (IOException | InterruptedException e) {
- LOGGER.error(e, "Failed to delete " + loc);
+ LOGGER.error("Failed to delete " + loc, e);
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java
index f6cc485..98d3576 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java
@@ -28,7 +28,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
@@ -60,13 +59,17 @@ import org.apache.carbondata.core.writer.CarbonIndexFileMergeWriter;
import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
import org.apache.carbondata.processing.merger.NodeMultiBlockRelation;
-import static org.apache.carbondata.core.enums.EscapeSequences.*;
+import static org.apache.carbondata.core.enums.EscapeSequences.BACKSPACE;
+import static org.apache.carbondata.core.enums.EscapeSequences.CARRIAGE_RETURN;
+import static org.apache.carbondata.core.enums.EscapeSequences.NEW_LINE;
+import static org.apache.carbondata.core.enums.EscapeSequences.TAB;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
public final class CarbonLoaderUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonLoaderUtil.class.getName());
private CarbonLoaderUtil() {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/test/java/org/apache/carbondata/processing/util/CarbonLoaderUtilTest.java
----------------------------------------------------------------------
diff --git a/processing/src/test/java/org/apache/carbondata/processing/util/CarbonLoaderUtilTest.java b/processing/src/test/java/org/apache/carbondata/processing/util/CarbonLoaderUtilTest.java
index 94f8b84..23c35da 100644
--- a/processing/src/test/java/org/apache/carbondata/processing/util/CarbonLoaderUtilTest.java
+++ b/processing/src/test/java/org/apache/carbondata/processing/util/CarbonLoaderUtilTest.java
@@ -20,18 +20,18 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.block.Distributable;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Test;
public class CarbonLoaderUtilTest {
- private final static LogService LOGGER
+ private static final Logger LOGGER
= LogServiceFactory.getLogService(CarbonLoaderUtilTest.class.getName());
private List<Distributable> generateBlocks() {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
index e4a65c0..9ed909a 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
@@ -30,7 +30,6 @@ import java.util.Random;
import java.util.UUID;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.keygenerator.directdictionary.timestamp.DateDirectDictionaryGenerator;
import org.apache.carbondata.core.metadata.datatype.DataType;
@@ -60,6 +59,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.log4j.Logger;
/**
* Writer Implementation to write Avro Record to carbondata file.
@@ -71,7 +71,7 @@ public class AvroCarbonWriter extends CarbonWriter {
private TaskAttemptContext context;
private ObjectArrayWritable writable;
private Schema avroSchema;
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonTable.class.getName());
AvroCarbonWriter(CarbonLoadModel loadModel, Configuration hadoopConf) throws IOException {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java b/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
index dd7f333..7bfc1cb 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
@@ -24,7 +24,6 @@ import java.util.List;
import java.util.Objects;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.row.CarbonRow;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
@@ -42,6 +41,7 @@ import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.log4j.Logger;
/**
* A CarbonStore implementation that works locally, without other compute framework dependency.
@@ -52,7 +52,7 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
@InterfaceAudience.Internal
class LocalCarbonStore extends MetaCachedCarbonStore {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LocalCarbonStore.class.getName());
@Override
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index ba8a49d..58f3f61 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -24,8 +24,9 @@ import java.util.*;
import org.apache.avro.generic.GenericData;
import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
@@ -819,7 +820,7 @@ public class CarbonReaderTest extends TestCase {
.addProperty(CarbonCommonConstants.CARBON_SYSTEM_FOLDER_LOCATION, path);
}
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonReaderTest.class.getName());
@Test
@@ -887,7 +888,7 @@ public class CarbonReaderTest extends TestCase {
e.printStackTrace();
Assert.fail(e.getMessage());
}
- LOGGER.audit("Bad record location:" + storeLocation);
+ Audit.log(LOGGER, "Bad record location:" + storeLocation);
File folder = new File(path);
Assert.assertTrue(folder.exists());
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
----------------------------------------------------------------------
diff --git a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
index 2d3e5fe..b1e87ce 100644
--- a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
+++ b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
@@ -18,11 +18,15 @@
package org.apache.carbondata.store.worker;
import java.io.IOException;
-import java.util.*;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Objects;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.datamap.DataMapChooser;
import org.apache.carbondata.core.datamap.DataMapDistributable;
import org.apache.carbondata.core.datamap.Segment;
@@ -53,6 +57,7 @@ import org.apache.carbondata.hadoop.CarbonRecordReader;
import org.apache.carbondata.hadoop.readsupport.impl.CarbonRowReadSupport;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
import org.apache.spark.search.SearchRequest;
import org.apache.spark.search.SearchResult;
import org.apache.spark.search.ShutdownRequest;
@@ -64,7 +69,7 @@ import org.apache.spark.search.ShutdownResponse;
@InterfaceAudience.Internal
public class SearchRequestHandler {
- private static final LogService LOG =
+ private static final Logger LOG =
LogServiceFactory.getLogService(SearchRequestHandler.class.getName());
public SearchResult handleSearch(SearchRequest request) {
@@ -96,7 +101,7 @@ public class SearchRequestHandler {
chooser = new DataMapChooser(table);
return chooser.chooseFGDataMap(filterInterface);
} catch (IOException e) {
- LOG.audit(e.getMessage());
+ Audit.log(LOG, e.getMessage());
return null;
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java b/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java
index 672f6a6..1ec0030 100644
--- a/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java
+++ b/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java
@@ -25,7 +25,6 @@ import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.compression.CompressorFactory;
@@ -62,12 +61,14 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.log4j.Logger;
+
/**
* Stream record writer
*/
public class CarbonStreamRecordWriter extends RecordWriter<Void, Object> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonStreamRecordWriter.class.getName());
// basic info
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java b/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java
index 6ee6876..b436b18 100644
--- a/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java
+++ b/streaming/src/main/java/org/apache/carbondata/streaming/segment/StreamSegment.java
@@ -26,7 +26,6 @@ import java.util.List;
import java.util.Map;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
@@ -59,13 +58,14 @@ import org.apache.carbondata.streaming.CarbonStreamRecordWriter;
import org.apache.carbondata.streaming.index.StreamFileIndex;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.log4j.Logger;
/**
* streaming segment manager
*/
public class StreamSegment {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(StreamSegment.class.getName());
/**
@@ -301,8 +301,8 @@ public class StreamSegment {
writer.getMeasureDataTypes(), blockletRowCount);
} catch (Throwable ex) {
if (writer != null) {
- LOGGER.error(ex, "Failed to append batch data to stream segment: " +
- writer.getSegmentDir());
+ LOGGER.error("Failed to append batch data to stream segment: " +
+ writer.getSegmentDir(), ex);
writer.setHasException(true);
}
throw ex;
[4/6] carbondata git commit: [CARBONDATA-3024] Refactor to use log4j
Logger directly
Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java b/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java
index 513e786..3d63560 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/ObjectSizeCalculator.java
@@ -19,9 +19,10 @@ package org.apache.carbondata.core.util;
import java.lang.reflect.Method;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.log4j.Logger;
+
/**
* This wrapper class is created so that core doesnt have direct dependency on spark
* TODO: Need to have carbon implementation if carbon needs to be used without spark
@@ -30,7 +31,7 @@ public final class ObjectSizeCalculator {
/**
* Logger object for the class
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ObjectSizeCalculator.class.getName());
/**
@@ -63,7 +64,7 @@ public final class ObjectSizeCalculator {
} catch (Throwable ex) {
// throwable is being caught as external interface is being invoked through reflection
// and runtime exceptions might get thrown
- LOGGER.error(ex, "Could not access method SizeEstimator:estimate.Returning default value");
+ LOGGER.error("Could not access method SizeEstimator:estimate.Returning default value", ex);
methodAccessible = false;
return defValue;
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
index 931e106..027e6cb 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
@@ -23,8 +23,8 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.carbondata.common.constants.LoggerAction;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.cache.CacheProvider;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonCommonConstantsInternal;
@@ -56,12 +56,14 @@ import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CAR
import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_TIMESTAMPFORMAT;
import static org.apache.carbondata.core.constants.CarbonV3DataFormatConstants.BLOCKLET_SIZE_IN_MB;
+import org.apache.log4j.Logger;
+
/**
* This class maintains carbon session params
*/
public class SessionParams implements Serializable, Cloneable {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CacheProvider.class.getName());
private static final long serialVersionUID = -7801994600594915264L;
@@ -124,7 +126,8 @@ public class SessionParams implements Serializable, Cloneable {
value = value.toUpperCase();
}
if (doAuditing) {
- LOGGER.audit("The key " + key + " with value " + value + " added in the session param");
+ Audit.log(LOGGER,
+ "The key " + key + " with value " + value + " added in the session param");
}
sProps.put(key, value);
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java b/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java
index 16dacb2..196fd64 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/TaskMetricsMap.java
@@ -23,17 +23,17 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.log4j.Logger;
/**
* This class maintains task level metrics info for all spawned child threads and parent task thread
*/
public class TaskMetricsMap {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(TaskMetricsMap.class.getName());
public static final InheritableThreadLocal<Long> threadLocal = new InheritableThreadLocal<>();
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java b/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java
index eef2507..833ed8b 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/path/HDFSLeaseUtils.java
@@ -20,7 +20,6 @@ package org.apache.carbondata.core.util.path;
import java.io.FileNotFoundException;
import java.io.IOException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -31,6 +30,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.viewfs.ViewFileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
+import org.apache.log4j.Logger;
/**
* Implementation for HDFS utility methods
@@ -47,7 +47,7 @@ public class HDFSLeaseUtils {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(HDFSLeaseUtils.class.getName());
/**
@@ -128,8 +128,8 @@ public class HDFSLeaseUtils {
+ retryInterval + " ms...");
Thread.sleep(retryInterval);
} catch (InterruptedException e) {
- LOGGER.error(e,
- "Interrupted exception occurred while recovering lease for file : " + filePath);
+ LOGGER.error(
+ "Interrupted exception occurred while recovering lease for file : " + filePath, e);
}
}
} catch (IOException e) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java
index 4cf3827..8e97705 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDeleteDeltaWriterImpl.java
@@ -22,13 +22,13 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.mutate.DeleteDeltaBlockDetails;
import com.google.gson.Gson;
+import org.apache.log4j.Logger;
/**
* This class is responsible for writing the delete delta file
@@ -38,7 +38,7 @@ public class CarbonDeleteDeltaWriterImpl implements CarbonDeleteDeltaWriter {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonDeleteDeltaWriterImpl.class.getName());
private String filePath;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
index 53411e9..7113771 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonDictionaryWriterImpl.java
@@ -23,7 +23,6 @@ import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -37,6 +36,7 @@ import org.apache.carbondata.core.util.path.HDFSLeaseUtils;
import org.apache.carbondata.format.ColumnDictionaryChunk;
import org.apache.carbondata.format.ColumnDictionaryChunkMeta;
+import org.apache.log4j.Logger;
import org.apache.thrift.TBase;
/**
@@ -47,7 +47,7 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonDictionaryWriterImpl.class.getName());
/**
@@ -352,7 +352,7 @@ public class CarbonDictionaryWriterImpl implements CarbonDictionaryWriter {
// Cases to handle
// 1. Handle File lease recovery
if (HDFSLeaseUtils.checkExceptionMessageForLeaseRecovery(e.getMessage())) {
- LOGGER.error(e, "Lease recovery exception encountered for file: " + dictionaryFile);
+ LOGGER.error("Lease recovery exception encountered for file: " + dictionaryFile, e);
boolean leaseRecovered = HDFSLeaseUtils.recoverFileLease(dictionaryFile);
if (leaseRecovered) {
// try to open output stream again after recovering the lease on file
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java b/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java
index f25081d..8524c83 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/sortindex/CarbonDictionarySortIndexWriterImpl.java
@@ -21,7 +21,6 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -33,6 +32,8 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.core.writer.ThriftWriter;
import org.apache.carbondata.format.ColumnSortInfo;
+import org.apache.log4j.Logger;
+
/**
* The class responsible for writing the dictionary/column sort index and sort index inverted data
* in the thrift format
@@ -61,7 +62,7 @@ public class CarbonDictionarySortIndexWriterImpl implements CarbonDictionarySort
/**
* Comment for <code>LOGGER</code>
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonDictionarySortIndexWriterImpl.class.getName());
/**
@@ -119,8 +120,8 @@ public class CarbonDictionarySortIndexWriterImpl implements CarbonDictionarySort
this.sortIndexThriftWriter.open();
sortIndexThriftWriter.write(columnSortInfo);
} catch (IOException ie) {
- LOGGER.error(ie,
- "problem while writing the dictionary sort index file.");
+ LOGGER.error(
+ "problem while writing the dictionary sort index file.", ie);
throw new IOException("problem while writing the dictionary sort index file.", ie);
} finally {
if (null != sortIndexThriftWriter) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java
index 42d4afa..daebd9f 100644
--- a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFileTest.java
@@ -17,11 +17,15 @@
package org.apache.carbondata.core.datastore.filesystem;
-import mockit.Mock;
-import mockit.MockUp;
-import org.apache.carbondata.common.logging.LogService;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+
import org.apache.carbondata.common.logging.LogServiceFactory;
+import mockit.Mock;
+import mockit.MockUp;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -30,15 +34,11 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.util.Progressable;
+import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
-import java.io.BufferedWriter;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue;
public class HDFSCarbonFileTest {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(HDFSCarbonFileTest.class.getName());
private static HDFSCarbonFile hdfsCarbonFile;
private static FileStatus fileStatus = null;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java b/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java
index 50fdcba..3032016 100644
--- a/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/load/LoadMetadataDetailsUnitTest.java
@@ -17,23 +17,24 @@
package org.apache.carbondata.core.load;
-import org.apache.carbondata.common.logging.LogService;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
+import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-
-import static junit.framework.Assert.*;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotSame;
public class LoadMetadataDetailsUnitTest {
private LoadMetadataDetails loadMetadataDetails;
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LoadMetadataDetailsUnitTest.class.getName());
@Before public void setup() {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
index 4734abd..ffb781a 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
@@ -23,8 +23,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
import org.apache.carbondata.core.datamap.dev.DataMapWriter;
@@ -43,8 +41,6 @@ import org.apache.hadoop.util.hash.Hash;
@InterfaceAudience.Internal
public abstract class AbstractBloomDataMapWriter extends DataMapWriter {
- private static final LogService LOG = LogServiceFactory.getLogService(
- BloomDataMapWriter.class.getCanonicalName());
private int bloomFilterSize;
private double bloomFilterFpp;
private boolean compressBloom;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
index a5376be..4ec215e 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
@@ -21,11 +21,18 @@ import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -63,6 +70,7 @@ import org.apache.carbondata.processing.loading.converter.impl.FieldEncoderFacto
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.bloom.CarbonBloomFilter;
import org.apache.hadoop.util.bloom.Key;
+import org.apache.log4j.Logger;
/**
* BloomDataCoarseGrainMap is constructed in blocklet level. For each indexed column,
@@ -71,7 +79,7 @@ import org.apache.hadoop.util.bloom.Key;
*/
@InterfaceAudience.Internal
public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(BloomCoarseGrainDataMap.class.getName());
private Map<String, CarbonColumn> name2Col;
private Cache<BloomCacheKeyValue.CacheKey, BloomCacheKeyValue.CacheValue> cache;
@@ -136,7 +144,7 @@ public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
this.name2Converters.put(indexedColumn.get(i).getColName(), fieldConverter);
}
} catch (IOException e) {
- LOGGER.error(e, "Exception occurs while init index columns");
+ LOGGER.error("Exception occurs while init index columns", e);
throw new RuntimeException(e);
}
this.badRecordLogHolder = new BadRecordLogHolder();
@@ -172,7 +180,7 @@ public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
try {
bloomQueryModels = createQueryModel(filterExp.getFilterExpression());
} catch (DictionaryGenerationException | UnsupportedEncodingException e) {
- LOGGER.error(e, "Exception occurs while creating query model");
+ LOGGER.error("Exception occurs while creating query model", e);
throw new RuntimeException(e);
}
for (BloomQueryModel bloomQueryModel : bloomQueryModels) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
index 8974918..4064d53 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
@@ -18,12 +18,16 @@ package org.apache.carbondata.datamap.bloom;
import java.io.File;
import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
@@ -51,13 +55,14 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.events.Event;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
/**
* This class is for Bloom Filter for blocklet level
*/
@InterfaceAudience.Internal
public class BloomCoarseGrainDataMapFactory extends DataMapFactory<CoarseGrainDataMap> {
- private static final LogService LOGGER = LogServiceFactory.getLogService(
+ private static final Logger LOGGER = LogServiceFactory.getLogService(
BloomCoarseGrainDataMapFactory.class.getName());
/**
* property for size of bloom filter
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
index 4063c2e..34abd80 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
@@ -21,8 +21,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CarbonLRUCache;
import org.apache.carbondata.core.memory.MemoryException;
@@ -38,8 +36,6 @@ import org.apache.hadoop.util.bloom.CarbonBloomFilter;
@InterfaceAudience.Internal
public class BloomDataMapCache
implements Cache<BloomCacheKeyValue.CacheKey, BloomCacheKeyValue.CacheValue> {
- private static final LogService LOGGER =
- LogServiceFactory.getLogService(BloomDataMapCache.class.getName());
/**
* CarbonLRU cache
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
index 2abdc3f..17813ba 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
@@ -16,13 +16,16 @@
*/
package org.apache.carbondata.datamap.bloom;
-import java.io.*;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
@@ -31,6 +34,7 @@ import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.bloom.CarbonBloomFilter;
+import org.apache.log4j.Logger;
/**
* This class works for merging and loading bloom index
@@ -38,7 +42,7 @@ import org.apache.hadoop.util.bloom.CarbonBloomFilter;
@InterfaceAudience.Internal
public class BloomIndexFileStore {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(BloomIndexFileStore.class.getName());
// suffix of original generated file
@@ -83,7 +87,7 @@ public class BloomIndexFileStore {
throw new RuntimeException("Failed to create directory " + mergeShardPath);
}
} catch (IOException e) {
- LOGGER.error(e, "Error occurs while create directory " + mergeShardPath);
+ LOGGER.error("Error occurs while create directory " + mergeShardPath, e);
throw new RuntimeException("Error occurs while create directory " + mergeShardPath);
}
@@ -110,7 +114,7 @@ public class BloomIndexFileStore {
CarbonUtil.closeStream(dataInputStream);
}
} catch (IOException e) {
- LOGGER.error(e, "Error occurs while merge bloom index file of column: " + indexCol);
+ LOGGER.error("Error occurs while merge bloom index file of column: " + indexCol, e);
// delete merge shard of bloom index for this segment when failed
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(mergeShardPath));
throw new RuntimeException(
@@ -123,7 +127,7 @@ public class BloomIndexFileStore {
try {
FileFactory.deleteFile(mergeInprogressFile, FileFactory.getFileType(mergeInprogressFile));
} catch (IOException e) {
- LOGGER.error(e, "Error occurs while deleting file " + mergeInprogressFile);
+ LOGGER.error("Error occurs while deleting file " + mergeInprogressFile, e);
throw new RuntimeException("Error occurs while deleting file " + mergeInprogressFile);
}
// remove old store
@@ -164,7 +168,7 @@ public class BloomIndexFileStore {
return bloomFilters;
} catch (IOException e) {
- LOGGER.error(e, "Error occurs while reading bloom index");
+ LOGGER.error("Error occurs while reading bloom index", e);
throw new RuntimeException("Error occurs while reading bloom index", e);
} finally {
CarbonUtil.closeStreams(dataInStream);
@@ -207,7 +211,7 @@ public class BloomIndexFileStore {
String.format("Read %d bloom indices from %s", bloomFilters.size(), mergeIndexFile));
return bloomFilters;
} catch (IOException e) {
- LOGGER.error(e, "Error occurs while reading merge bloom index");
+ LOGGER.error("Error occurs while reading merge bloom index", e);
throw new RuntimeException("Error occurs while reading merge bloom index", e);
} finally {
CarbonUtil.closeStreams(mergeIndexInStream);
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
----------------------------------------------------------------------
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
index 0993218..a9155d9 100644
--- a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
+++ b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
@@ -27,7 +27,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.Segment;
import org.apache.carbondata.core.datamap.dev.DataMapWriter;
@@ -48,7 +48,7 @@ import org.apache.hadoop.fs.Path;
public class MinMaxDataWriter extends DataMapWriter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(TableInfo.class.getName());
private Object[] pageLevelMin, pageLevelMax;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
index 40dc975..510d87c 100644
--- a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
+++ b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
@@ -26,7 +26,7 @@ import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.dev.DataMapModel;
import org.apache.carbondata.core.datamap.dev.cgdatamap.CoarseGrainDataMap;
@@ -53,7 +53,7 @@ import org.apache.hadoop.fs.PathFilter;
*/
public class MinMaxIndexDataMap extends CoarseGrainDataMap {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(MinMaxIndexDataMap.class.getName());
private String[] indexFilePath;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
----------------------------------------------------------------------
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
index 7f54a0e..f113508 100644
--- a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
+++ b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
@@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.DataMapDistributable;
import org.apache.carbondata.core.datamap.DataMapMeta;
@@ -49,7 +49,7 @@ import org.apache.hadoop.conf.Configuration;
* Min Max DataMap Factory
*/
public class MinMaxIndexDataMapFactory extends CoarseGrainDataMapFactory {
- private static final LogService LOGGER = LogServiceFactory.getLogService(
+ private static final Logger LOGGER = LogServiceFactory.getLogService(
MinMaxIndexDataMapFactory.class.getName());
private DataMapMeta dataMapMeta;
private String dataMapName;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
index 7081fa4..7dcd307 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
@@ -24,7 +24,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
@@ -40,6 +39,7 @@ import static org.apache.carbondata.datamap.lucene.LuceneDataMapWriter.flushCach
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
@@ -52,7 +52,7 @@ import org.roaringbitmap.RoaringBitmap;
public class LuceneDataMapBuilder implements DataMapBuilder {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LuceneDataMapWriter.class.getName());
private String dataMapPath;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
index 3179584..68c3bcc 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
@@ -25,7 +25,6 @@ import java.util.Objects;
import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.DataMapDistributable;
import org.apache.carbondata.core.datamap.DataMapLevel;
@@ -53,6 +52,7 @@ import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.events.Event;
+import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
@@ -88,7 +88,7 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
/**
* Logger
*/
- final LogService LOGGER = LogServiceFactory.getLogService(this.getClass().getName());
+ final Logger LOGGER = LogServiceFactory.getLogService(this.getClass().getName());
/**
* table's index columns
@@ -281,7 +281,7 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
try {
deleteDatamap();
} catch (MalformedDataMapCommandException ex) {
- LOGGER.error(ex, "failed to delete datamap directory ");
+ LOGGER.error("failed to delete datamap directory ", ex);
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
index bdb17ed..9fd9409 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
@@ -27,7 +27,6 @@ import java.util.List;
import java.util.Map;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
@@ -42,6 +41,7 @@ import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
@@ -73,7 +73,7 @@ public class LuceneDataMapWriter extends DataMapWriter {
/**
* logger
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LuceneDataMapWriter.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
index 63f8d7a..048d41a 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
@@ -25,7 +25,6 @@ import java.util.List;
import java.util.Map;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.dev.DataMapModel;
import org.apache.carbondata.core.datamap.dev.fgdatamap.FineGrainBlocklet;
@@ -41,6 +40,7 @@ import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
@@ -64,7 +64,7 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
/**
* log information
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LuceneFineGrainDataMap.class.getName());
/**
@@ -436,7 +436,7 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
}
}
} catch (IOException e) {
- LOGGER.error(e, "Ignoring the exception, Error while closing the lucene index reader");
+ LOGGER.error("Ignoring the exception, Error while closing the lucene index reader", e);
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
----------------------------------------------------------------------
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
index 9e0f8e5..5dc6b27 100644
--- a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
+++ b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
@@ -27,6 +27,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.datamap.DataMapStoreManager
import org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider
import org.apache.carbondata.core.metadata.schema.table.DataMapSchema
@@ -71,7 +72,7 @@ class MVAnalyzerRule(sparkSession: SparkSession) extends Rule[LogicalPlan] {
val modularPlan = catalog.mvSession.sessionState.rewritePlan(plan).withMVTable
if (modularPlan.find (_.rewritten).isDefined) {
val compactSQL = modularPlan.asCompactSQL
- LOGGER.audit(s"\n$compactSQL\n")
+ Audit.log(LOGGER, s"\n$compactSQL\n")
val analyzed = sparkSession.sql(compactSQL).queryExecution.analyzed
analyzed
} else {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
index bc0e280..245d3e8 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
@@ -17,7 +17,6 @@
package org.apache.carbondata.examples.sdk;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
@@ -25,17 +24,22 @@ import org.apache.carbondata.core.scan.expression.ColumnExpression;
import org.apache.carbondata.core.scan.expression.LiteralExpression;
import org.apache.carbondata.core.scan.expression.conditional.EqualToExpression;
import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.sdk.file.*;
+import org.apache.carbondata.sdk.file.CarbonReader;
+import org.apache.carbondata.sdk.file.CarbonWriter;
+import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
+import org.apache.carbondata.sdk.file.Field;
+import org.apache.carbondata.sdk.file.Schema;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.Constants;
+import org.apache.log4j.Logger;
/**
* Example for testing CarbonWriter on S3
*/
public class SDKS3Example {
public static void main(String[] args) throws Exception {
- LogService logger = LogServiceFactory.getLogService(SDKS3Example.class.getName());
+ Logger logger = LogServiceFactory.getLogService(SDKS3Example.class.getName());
if (args == null || args.length < 3) {
logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>"
+ "<s3-endpoint> [table-path-on-s3] [rows]");
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
index 1fac673..2462d8d 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
@@ -17,15 +17,15 @@
package org.apache.carbondata.examples.sdk;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.scan.expression.ColumnExpression;
import org.apache.carbondata.core.scan.expression.LiteralExpression;
import org.apache.carbondata.core.scan.expression.conditional.EqualToExpression;
-import org.apache.carbondata.sdk.file.*;
+import org.apache.carbondata.sdk.file.CarbonReader;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
import static org.apache.hadoop.fs.s3a.Constants.ACCESS_KEY;
import static org.apache.hadoop.fs.s3a.Constants.ENDPOINT;
@@ -36,7 +36,7 @@ import static org.apache.hadoop.fs.s3a.Constants.SECRET_KEY;
*/
public class SDKS3ReadExample {
public static void main(String[] args) throws Exception {
- LogService logger = LogServiceFactory.getLogService(SDKS3ReadExample.class.getName());
+ Logger logger = LogServiceFactory.getLogService(SDKS3ReadExample.class.getName());
if (args == null || args.length < 3) {
logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>"
+ "<s3-endpoint> [table-path-on-s3]");
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
index eb9ff7c..cda8b7a 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
@@ -23,7 +23,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
@@ -54,6 +53,7 @@ import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
+import org.apache.log4j.Logger;
/**
* Outputcommitter which manages the segments during loading.It commits segment information to the
@@ -61,7 +61,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
*/
public class CarbonOutputCommitter extends FileOutputCommitter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonOutputCommitter.class.getName());
private ICarbonLock segmentLock;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
index 5525941..7fd9235 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
@@ -34,7 +34,6 @@ import java.util.Set;
import java.util.UUID;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
@@ -93,6 +92,7 @@ import org.apache.hadoop.mapred.TaskAttemptID;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.log4j.Logger;
/**
* This class will create store file based on provided schema
@@ -100,7 +100,7 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
*/
public class StoreCreator {
- private static LogService LOG =
+ private static final Logger LOG =
LogServiceFactory.getLogService(StoreCreator.class.getCanonicalName());
private AbsoluteTableIdentifier absoluteTableIdentifier;
private String storePath = null;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java
index 7641427..ccc0594 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/util/CarbonInputFormatUtil.java
@@ -22,7 +22,6 @@ import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Locale;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonCommonConstantsInternal;
@@ -45,6 +44,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.log4j.Logger;
/**
* Utility class
@@ -54,7 +54,7 @@ public class CarbonInputFormatUtil {
/**
* Attribute for Carbon LOGGER.
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonProperties.class.getName());
public static <V> CarbonTableInputFormat<V> createCarbonInputFormat(
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
index d4cf480..9382922 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
@@ -20,7 +20,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.exception.InvalidConfigurationException;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
@@ -47,12 +46,14 @@ import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.StringUtils;
+import org.apache.log4j.Logger;
public class MapredCarbonInputFormat extends CarbonTableInputFormat<ArrayWritable>
implements InputFormat<Void, ArrayWritable>, CombineHiveInputFormat.AvoidSplitCombination {
private static final String CARBON_TABLE = "mapreduce.input.carboninputformat.table";
- private LogService LOGGER = LogServiceFactory.getLogService(this.getClass().getCanonicalName());
+ private static final Logger LOGGER =
+ LogServiceFactory.getLogService(MapredCarbonInputFormat.class.getCanonicalName());
/**
* this method will read the schema from the physical file and populate into CARBON_TABLE
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java
index 4f5bb58..51677de 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSource.java
@@ -19,10 +19,8 @@ package org.apache.carbondata.presto;
import java.io.IOException;
import java.util.List;
+import java.util.Objects;
-import static java.util.Objects.requireNonNull;
-
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.presto.readers.PrestoVectorBlockBuilder;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
@@ -35,6 +33,7 @@ import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.LazyBlock;
import com.facebook.presto.spi.block.LazyBlockLoader;
+import org.apache.log4j.Logger;
import static com.google.common.base.Preconditions.checkState;
@@ -43,7 +42,7 @@ import static com.google.common.base.Preconditions.checkState;
*/
class CarbondataPageSource implements ConnectorPageSource {
- private static final LogService logger =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbondataPageSource.class.getName());
private List<ColumnHandle> columnHandles;
private boolean closed;
@@ -132,12 +131,12 @@ class CarbondataPageSource implements ConnectorPageSource {
}
private void closeWithSuppression(Throwable throwable) {
- requireNonNull(throwable, "throwable is null");
+ Objects.requireNonNull(throwable, "throwable is null");
try {
close();
} catch (RuntimeException e) {
// Self-suppression not permitted
- logger.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
if (throwable != e) {
throwable.addSuppressed(e);
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
index 5a1e140..6ddee42 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonTableReader.java
@@ -32,9 +32,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static java.util.Objects.requireNonNull;
-
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.DataMapStoreManager;
@@ -79,6 +76,7 @@ import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
+import org.apache.log4j.Logger;
import org.apache.thrift.TBase;
import static org.apache.hadoop.fs.s3a.Constants.ACCESS_KEY;
@@ -127,7 +125,7 @@ public class CarbonTableReader {
/**
* Logger instance
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonTableReader.class.getName());
/**
@@ -136,7 +134,7 @@ public class CarbonTableReader {
private List<String> schemaNames = new ArrayList<>();
@Inject public CarbonTableReader(CarbonTableConfig config) {
- this.config = requireNonNull(config, "CarbonTableConfig is null");
+ this.config = Objects.requireNonNull(config, "CarbonTableConfig is null");
this.carbonCache = new AtomicReference(new HashMap());
tableList = new ConcurrentSet<>();
setS3Properties();
@@ -236,7 +234,7 @@ public class CarbonTableReader {
* @return
*/
public Set<String> getTableNames(String schema) {
- requireNonNull(schema, "schema is null");
+ Objects.requireNonNull(schema, "schema is null");
return updateTableList(schema);
}
@@ -270,7 +268,7 @@ public class CarbonTableReader {
throw new RuntimeException(e);
}
- requireNonNull(schemaTableName, "schemaTableName is null");
+ Objects.requireNonNull(schemaTableName, "schemaTableName is null");
return loadTableMetadata(schemaTableName);
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
index 21de2ae..d56b465 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
@@ -49,14 +49,14 @@ class BlockPruneQueryTestCase extends QueryTest with BeforeAndAfterAll {
}
} catch {
case ex: Exception =>
- LOGGER.error(ex, "Build test file for block prune failed")
+ LOGGER.error("Build test file for block prune failed", ex)
} finally {
if (writer != null) {
try {
writer.close()
} catch {
case ex: Exception =>
- LOGGER.error(ex, "Close output stream catching exception")
+ LOGGER.error("Close output stream catching exception", ex)
}
}
}
@@ -102,7 +102,7 @@ class BlockPruneQueryTestCase extends QueryTest with BeforeAndAfterAll {
}
} catch {
case ex: Exception =>
- LOGGER.error(ex, "Delete temp test data file for block prune catching exception")
+ LOGGER.error("Delete temp test data file for block prune catching exception", ex)
}
sql("DROP TABLE IF EXISTS blockprune")
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
index 3865f08..91e4219 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
@@ -53,14 +53,14 @@ class CarbonCustomBlockDistributionTest extends QueryTest with BeforeAndAfterAll
}
} catch {
case ex: Exception =>
- LOGGER.error(ex, "Build test file for block prune failed")
+ LOGGER.error("Build test file for block prune failed", ex)
} finally {
if (writer != null) {
try {
writer.close()
} catch {
case ex: Exception =>
- LOGGER.error(ex, "Close output stream catching exception")
+ LOGGER.error("Close output stream catching exception", ex)
}
}
}
@@ -107,7 +107,7 @@ class CarbonCustomBlockDistributionTest extends QueryTest with BeforeAndAfterAll
}
} catch {
case ex: Exception =>
- LOGGER.error(ex, "Delete temp test data file for block prune catching exception")
+ LOGGER.error("Delete temp test data file for block prune catching exception", ex)
}
sql("DROP TABLE IF EXISTS blockprune")
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java
index 1c8e2d2..3aa7fbf 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClient.java
@@ -18,14 +18,15 @@ package org.apache.carbondata.spark.dictionary.client;
import java.nio.charset.Charset;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.dictionary.client.DictionaryClient;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
import com.google.common.collect.Lists;
import io.netty.channel.nio.NioEventLoopGroup;
+import org.apache.log4j.Logger;
import org.apache.spark.SecurityManager;
import org.apache.spark.SparkConf;
import org.apache.spark.network.TransportContext;
@@ -41,7 +42,7 @@ import org.apache.spark.network.util.TransportConf;
*/
public class SecureDictionaryClient implements DictionaryClient {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SecureDictionaryClient.class.getName());
private SecureDictionaryClientHandler dictionaryClientHandler =
@@ -59,7 +60,7 @@ public class SecureDictionaryClient implements DictionaryClient {
*/
@Override public void startClient(String secretKey, String address, int port,
boolean encryptSecureServer) {
- LOGGER.audit("Starting client on " + address + " " + port);
+ Audit.log(LOGGER, "Starting client on " + address + " " + port);
long start = System.currentTimeMillis();
SecurityManager securityMgr;
@@ -91,7 +92,7 @@ public class SecureDictionaryClient implements DictionaryClient {
try {
client = clientFactory.createClient(address, port);
} catch (Exception e) {
- LOGGER.error(e, "Dictionary Client Failed to bind to port:");
+ LOGGER.error("Dictionary Client Failed to bind to port:", e);
}
LOGGER.info(
"Dictionary client Started, Total time spent : " + (System.currentTimeMillis() - start));
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java
index cdf2553..d3f27ed 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/client/SecureDictionaryClientHandler.java
@@ -20,13 +20,13 @@ import java.nio.ByteBuffer;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;
+import org.apache.log4j.Logger;
import org.apache.spark.network.client.RpcResponseCallback;
import org.apache.spark.network.client.TransportClient;
import org.apache.spark.network.server.OneForOneStreamManager;
@@ -38,7 +38,7 @@ import org.apache.spark.network.server.StreamManager;
*/
public class SecureDictionaryClientHandler extends RpcHandler {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SecureDictionaryClientHandler.class.getName());
private final BlockingQueue<DictionaryMessage> responseMsgQueue = new LinkedBlockingQueue<>();
@@ -58,7 +58,7 @@ public class SecureDictionaryClientHandler extends RpcHandler {
key.writeData(buffer);
resp = client.sendRpcSync(buffer.nioBuffer(), 100000);
} catch (Exception e) {
- LOGGER.error(e, "Error while send request to server ");
+ LOGGER.error("Error while send request to server ", e);
}
try {
if (resp == null) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
index 995e520..a029da0 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
@@ -19,8 +19,8 @@ package org.apache.carbondata.spark.dictionary.server;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessageType;
@@ -33,6 +33,7 @@ import com.google.common.collect.Lists;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
import org.apache.spark.SecurityManager;
import org.apache.spark.SparkConf;
import org.apache.spark.network.TransportContext;
@@ -47,7 +48,7 @@ import scala.Some;
*/
public class SecureDictionaryServer extends AbstractDictionaryServer implements DictionaryServer {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SecureDictionaryServer.class.getName());
private SecureDictionaryServerHandler secureDictionaryServerHandler;
@@ -73,10 +74,8 @@ public class SecureDictionaryServer extends AbstractDictionaryServer implements
return null;
}
});
- } catch (IOException io) {
- LOGGER.error(io, "Failed to start Dictionary Server in secure mode");
- } catch (InterruptedException ie) {
- LOGGER.error(ie, "Failed to start Dictionary Server in secure mode");
+ } catch (IOException | InterruptedException io) {
+ LOGGER.error("Failed to start Dictionary Server in secure mode", io);
}
}
@@ -146,13 +145,14 @@ public class SecureDictionaryServer extends AbstractDictionaryServer implements
//iteratively listening to newports
context
.createServer(host, newPort, Lists.<TransportServerBootstrap>newArrayList(bootstrap));
- LOGGER.audit("Dictionary Server started, Time spent " + (System.currentTimeMillis() - start)
+ Audit.log(LOGGER,
+ "Dictionary Server started, Time spent " + (System.currentTimeMillis() - start)
+ " Listening on port " + newPort);
this.port = newPort;
this.host = host;
break;
} catch (Exception e) {
- LOGGER.error(e, "Dictionary Server Failed to bind to port: " + newPort);
+ LOGGER.error("Dictionary Server Failed to bind to port: " + newPort, e);
if (i == 9) {
throw new RuntimeException("Dictionary Server Could not bind to any port");
}
@@ -209,14 +209,12 @@ public class SecureDictionaryServer extends AbstractDictionaryServer implements
return null;
}
});
- } catch (IOException io) {
- LOGGER.error(io, "Failed to stop Dictionary Server in secure mode");
- } catch (InterruptedException ie) {
- LOGGER.error(ie, "Failed to stop Dictionary Server in secure mode");
+ } catch (IOException | InterruptedException e) {
+ LOGGER.error("Failed to stop Dictionary Server in secure mode", e);
}
}
- public void initializeDictionaryGenerator(CarbonTable carbonTable) throws Exception {
+ public void initializeDictionaryGenerator(CarbonTable carbonTable) {
secureDictionaryServerHandler.initializeTable(carbonTable);
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java
index aaa4cf0..9e291a4 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServerHandler.java
@@ -18,7 +18,6 @@ package org.apache.carbondata.spark.dictionary.server;
import java.nio.ByteBuffer;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.dictionary.generator.ServerDictionaryGenerator;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
@@ -28,6 +27,7 @@ import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandler;
+import org.apache.log4j.Logger;
import org.apache.spark.network.client.RpcResponseCallback;
import org.apache.spark.network.client.TransportClient;
import org.apache.spark.network.server.OneForOneStreamManager;
@@ -39,7 +39,7 @@ import org.apache.spark.network.server.StreamManager;
*/
@ChannelHandler.Sharable public class SecureDictionaryServerHandler extends RpcHandler {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SecureDictionaryServerHandler.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
index 3864b5d..df173cd 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
@@ -29,6 +29,7 @@ import org.apache.spark.unsafe.types.UTF8String
import org.apache.carbondata.common.Strings
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.filesystem.CarbonFile
import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -158,7 +159,7 @@ object CarbonStore {
carbonTable: CarbonTable,
forceTableClean: Boolean,
currentTablePartitions: Option[Seq[PartitionSpec]] = None): Unit = {
- LOGGER.audit(s"The clean files request has been received for $dbName.$tableName")
+ Audit.log(LOGGER, s"The clean files request has been received for $dbName.$tableName")
var carbonCleanFilesLock: ICarbonLock = null
val absoluteTableIdentifier = if (forceTableClean) {
AbsoluteTableIdentifier.from(tablePath, dbName, tableName, tableName)
@@ -202,7 +203,7 @@ object CarbonStore {
CarbonLockUtil.fileUnlock(carbonCleanFilesLock, LockUsage.CLEAN_FILES_LOCK)
}
}
- LOGGER.audit(s"Clean files operation is success for $dbName.$tableName.")
+ Audit.log(LOGGER, s"Clean files operation is success for $dbName.$tableName.")
}
/**
@@ -281,7 +282,7 @@ object CarbonStore {
tableName: String,
carbonTable: CarbonTable): Unit = {
- LOGGER.audit(s"Delete segment by Id request has been received for $dbName.$tableName")
+ Audit.log(LOGGER, s"Delete segment by Id request has been received for $dbName.$tableName")
validateLoadIds(loadids)
val path = carbonTable.getMetadataPath
@@ -290,7 +291,7 @@ object CarbonStore {
val invalidLoadIds = SegmentStatusManager.updateDeletionStatus(
carbonTable.getAbsoluteTableIdentifier, loadids.asJava, path).asScala
if (invalidLoadIds.isEmpty) {
- LOGGER.audit(s"Delete segment by Id is successfull for $dbName.$tableName.")
+ Audit.log(LOGGER, s"Delete segment by Id is successfull for $dbName.$tableName.")
} else {
sys.error(s"Delete segment by Id is failed. Invalid ID is: ${invalidLoadIds.mkString(",")}")
}
@@ -307,7 +308,7 @@ object CarbonStore {
dbName: String,
tableName: String,
carbonTable: CarbonTable): Unit = {
- LOGGER.audit(s"Delete segment by Id request has been received for $dbName.$tableName")
+ Audit.log(LOGGER, s"Delete segment by Id request has been received for $dbName.$tableName")
val time = validateTimeFormat(timestamp)
val path = carbonTable.getMetadataPath
@@ -320,7 +321,7 @@ object CarbonStore {
path,
time).asScala
if (invalidLoadTimestamps.isEmpty) {
- LOGGER.audit(s"Delete segment by date is successful for $dbName.$tableName.")
+ Audit.log(LOGGER, s"Delete segment by date is successful for $dbName.$tableName.")
} else {
sys.error("Delete segment by date is failed. No matching segment found.")
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
index f1a12bf..f5c65b3 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
@@ -271,11 +271,11 @@ object DataLoadProcessorStepOnSpark {
}
} catch {
case e: CarbonDataWriterException =>
- LOGGER.error(e, "Failed for table: " + tableName + " in Data Writer Step")
+ LOGGER.error("Failed for table: " + tableName + " in Data Writer Step", e)
throw new CarbonDataLoadingException("Error while initializing data handler : " +
e.getMessage)
case e: Exception =>
- LOGGER.error(e, "Failed for table: " + tableName + " in Data Writer Step")
+ LOGGER.error("Failed for table: " + tableName + " in Data Writer Step", e)
throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage, e)
} finally {
if (rowConverter != null) {
@@ -316,11 +316,11 @@ object DataLoadProcessorStepOnSpark {
e match {
case e: CarbonDataLoadingException => throw e
case e: TextParsingException =>
- LOGGER.error(e, "Data Loading failed for table " + model.getTableName)
+ LOGGER.error("Data Loading failed for table " + model.getTableName, e)
throw new CarbonDataLoadingException("Data Loading failed for table " + model.getTableName,
e)
case e: Exception =>
- LOGGER.error(e, "Data Loading failed for table " + model.getTableName)
+ LOGGER.error("Data Loading failed for table " + model.getTableName, e)
throw new CarbonDataLoadingException("Data Loading failed for table " + model.getTableName,
e)
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala
index ffd20b1..b3eb4f5 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/AlterTableDropColumnRDD.scala
@@ -71,7 +71,7 @@ class AlterTableDropColumnRDD[K, V](
}
} catch {
case ex: Exception =>
- LOGGER.error(ex, ex.getMessage)
+ LOGGER.error(ex.getMessage, ex)
throw ex
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
index 0c30186..d01caee 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
@@ -226,7 +226,7 @@ class CarbonMergerRDD[K, V](
} catch {
case e: Exception =>
- LOGGER.error(e, "Compaction Failed ")
+ LOGGER.error("Compaction Failed ", e)
throw e
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
index 6076e4a..ab8bb8b 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
@@ -24,12 +24,11 @@ import java.util.{Date, UUID}
import scala.collection.mutable
import scala.util.Random
-import scala.util.control.NonFatal
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.{TaskAttemptID, TaskType}
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
-import org.apache.spark.{Partition, SerializableWritable, SparkContext, SparkEnv, TaskContext}
+import org.apache.spark.{Partition, SparkEnv, TaskContext}
import org.apache.spark.rdd.{DataLoadCoalescedRDD, DataLoadPartitionWrap, RDD}
import org.apache.spark.serializer.SerializerInstance
import org.apache.spark.sql.{Row, SparkSession}
@@ -38,13 +37,11 @@ import org.apache.spark.util.SparkUtil
import org.apache.carbondata.common.CarbonIterator
import org.apache.carbondata.common.logging.LogServiceFactory
-import org.apache.carbondata.common.logging.impl.StandardLogService
import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.datastore.compression.CompressorFactory
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.metadata.datatype.DataTypes
import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatus}
-import org.apache.carbondata.core.util.{CarbonProperties, CarbonTimeStatisticsFactory, ThreadLocalSessionInfo, ThreadLocalTaskInfo}
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonTimeStatisticsFactory, ThreadLocalTaskInfo}
import org.apache.carbondata.core.util.path.CarbonTablePath
import org.apache.carbondata.processing.loading.{DataLoadExecutor, FailureCauses, TableProcessingOperations}
import org.apache.carbondata.processing.loading.csvinput.{BlockDetails, CSVInputFormat, CSVRecordReaderIterator}
@@ -221,9 +218,6 @@ class NewCarbonDataLoadRDD[K, V](
CarbonQueryUtil.splitFilePath(carbonLoadModel.getFactFilePath, fileList, ",")
model = carbonLoadModel.getCopyWithPartition(
carbonLoadModel.getCsvHeader, carbonLoadModel.getCsvDelimiter)
- StandardLogService.setThreadName(StandardLogService
- .getPartitionID(model.getCarbonDataLoadSchema.getCarbonTable.getTableUniqueName)
- , ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId + "")
val readers =
split.nodeBlocksDetail.map(format.createRecordReader(_, hadoopAttemptContext))
readers.zipWithIndex.map { case (reader, index) =>
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala
index 6a4577f..6911b0b 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionDropper.scala
@@ -22,7 +22,9 @@ import java.io.IOException
import org.apache.spark.sql.execution.command.{AlterPartitionModel, DropPartitionCallableModel}
import org.apache.spark.util.PartitionUtils
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.metadata.schema.partition.PartitionType
import org.apache.carbondata.spark.{AlterPartitionResultImpl, PartitionFactory}
@@ -87,7 +89,7 @@ object PartitionDropper {
finalDropStatus = dropStatus.forall(_._2)
}
if (!finalDropStatus) {
- logger.audit(s"Drop Partition request failed for table " +
+ Audit.log(logger, s"Drop Partition request failed for table " +
s"${ dbName }.${ tableName }")
logger.error(s"Drop Partition request failed for table " +
s"${ dbName }.${ tableName }")
@@ -103,7 +105,7 @@ object PartitionDropper {
case e: IOException => sys.error(s"Exception while delete original carbon files " +
e.getMessage)
}
- logger.audit(s"Drop Partition request completed for table " +
+ Audit.log(logger, s"Drop Partition request completed for table " +
s"${ dbName }.${ tableName }")
logger.info(s"Drop Partition request completed for table " +
s"${ dbName }.${ tableName }")
@@ -114,7 +116,7 @@ object PartitionDropper {
} else {
PartitionUtils.deleteOriginalCarbonFile(alterPartitionModel, absoluteTableIdentifier,
Seq(partitionId).toList, dbName, tableName, partitionInfo)
- logger.audit(s"Drop Partition request completed for table " +
+ Audit.log(logger, s"Drop Partition request completed for table " +
s"${ dbName }.${ tableName }")
logger.info(s"Drop Partition request completed for table " +
s"${ dbName }.${ tableName }")
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala
index 0d437f6..ca9f049 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/PartitionSplitter.scala
@@ -22,7 +22,9 @@ import java.io.IOException
import org.apache.spark.sql.execution.command.{AlterPartitionModel, SplitPartitionCallableModel}
import org.apache.spark.util.PartitionUtils
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.spark.{AlterPartitionResultImpl, PartitionFactory}
object PartitionSplitter {
@@ -73,7 +75,7 @@ object PartitionSplitter {
finalSplitStatus = splitStatus.forall(_._2)
}
if (!finalSplitStatus) {
- logger.audit(s"Add/Split Partition request failed for table " +
+ Audit.log(logger, s"Add/Split Partition request failed for table " +
s"${ databaseName }.${ tableName }")
logger.error(s"Add/Split Partition request failed for table " +
s"${ databaseName }.${ tableName }")
@@ -88,7 +90,7 @@ object PartitionSplitter {
case e: IOException => sys.error(s"Exception while delete original carbon files " +
e.getMessage)
}
- logger.audit(s"Add/Split Partition request completed for table " +
+ Audit.log(logger, s"Add/Split Partition request completed for table " +
s"${ databaseName }.${ tableName }")
logger.info(s"Add/Split Partition request completed for table " +
s"${ databaseName }.${ tableName }")
[2/6] carbondata git commit: [CARBONDATA-3024] Refactor to use log4j
Logger directly
Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
index 22ff5c4..1f1e7bd 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
@@ -25,7 +25,8 @@ import org.apache.spark.sql.hive.CarbonSessionCatalog
import org.apache.spark.util.AlterTableUtil
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.features.TableOperation
import org.apache.carbondata.core.locks.{ICarbonLock, LockUsage}
import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
@@ -39,11 +40,11 @@ private[sql] case class CarbonAlterTableAddColumnCommand(
extends MetadataCommand {
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val tableName = alterTableAddColumnsModel.tableName
val dbName = alterTableAddColumnsModel.databaseName
.getOrElse(sparkSession.catalog.currentDatabase)
- LOGGER.audit(s"Alter table add columns request has been received for $dbName.$tableName")
+ Audit.log(LOGGER, s"Alter table add columns request has been received for $dbName.$tableName")
val locksToBeAcquired = List(LockUsage.METADATA_LOCK, LockUsage.COMPACTION_LOCK)
var locks = List.empty[ICarbonLock]
var timeStamp = 0L
@@ -104,10 +105,10 @@ private[sql] case class CarbonAlterTableAddColumnCommand(
carbonTable, alterTableAddColumnsModel)
OperationListenerBus.getInstance.fireEvent(alterTablePostExecutionEvent, operationContext)
LOGGER.info(s"Alter table for add columns is successful for table $dbName.$tableName")
- LOGGER.audit(s"Alter table for add columns is successful for table $dbName.$tableName")
+ Audit.log(LOGGER, s"Alter table for add columns is successful for table $dbName.$tableName")
} catch {
case e: Exception =>
- LOGGER.error(e, "Alter table add columns failed")
+ LOGGER.error("Alter table add columns failed", e)
if (newCols.nonEmpty) {
LOGGER.info("Cleaning up the dictionary files as alter table add operation failed")
new AlterTableDropColumnRDD(sparkSession,
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala
index 9ce79e9..716b9c9 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala
@@ -25,7 +25,8 @@ import org.apache.spark.sql.hive.CarbonSessionCatalog
import org.apache.spark.util.AlterTableUtil
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.features.TableOperation
import org.apache.carbondata.core.locks.{ICarbonLock, LockUsage}
import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
@@ -40,11 +41,12 @@ private[sql] case class CarbonAlterTableDataTypeChangeCommand(
extends MetadataCommand {
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val tableName = alterTableDataTypeChangeModel.tableName
val dbName = alterTableDataTypeChangeModel.databaseName
.getOrElse(sparkSession.catalog.currentDatabase)
- LOGGER.audit(s"Alter table change data type request has been received for $dbName.$tableName")
+ Audit.log(LOGGER,
+ s"Alter table change data type request has been received for $dbName.$tableName")
val locksToBeAcquired = List(LockUsage.METADATA_LOCK, LockUsage.COMPACTION_LOCK)
var locks = List.empty[ICarbonLock]
// get the latest carbon table and check for column existence
@@ -68,7 +70,7 @@ private[sql] case class CarbonAlterTableDataTypeChangeCommand(
val columnName = alterTableDataTypeChangeModel.columnName
val carbonColumns = carbonTable.getCreateOrderColumn(tableName).asScala.filter(!_.isInvisible)
if (!carbonColumns.exists(_.getColName.equalsIgnoreCase(columnName))) {
- LOGGER.audit(s"Alter table change data type request has failed. " +
+ Audit.log(LOGGER, s"Alter table change data type request has failed. " +
s"Column $columnName does not exist")
throwMetadataException(dbName, tableName, s"Column does not exist: $columnName")
}
@@ -76,7 +78,7 @@ private[sql] case class CarbonAlterTableDataTypeChangeCommand(
if (carbonColumn.size == 1) {
validateColumnDataType(alterTableDataTypeChangeModel.dataTypeInfo, carbonColumn.head)
} else {
- LOGGER.audit(s"Alter table change data type request has failed. " +
+ Audit.log(LOGGER, s"Alter table change data type request has failed. " +
s"Column $columnName is invalid")
throwMetadataException(dbName, tableName, s"Invalid Column: $columnName")
}
@@ -116,7 +118,8 @@ private[sql] case class CarbonAlterTableDataTypeChangeCommand(
alterTableDataTypeChangeModel)
OperationListenerBus.getInstance.fireEvent(alterTablePostExecutionEvent, operationContext)
LOGGER.info(s"Alter table for data type change is successful for table $dbName.$tableName")
- LOGGER.audit(s"Alter table for data type change is successful for table $dbName.$tableName")
+ Audit.log(LOGGER,
+ s"Alter table for data type change is successful for table $dbName.$tableName")
} catch {
case e: Exception =>
LOGGER.error("Alter table change datatype failed : " + e.getMessage)
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
index 1dbe28c..d601ed6 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
@@ -26,14 +26,13 @@ import org.apache.spark.sql.hive.CarbonSessionCatalog
import org.apache.spark.util.AlterTableUtil
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.features.TableOperation
import org.apache.carbondata.core.locks.{ICarbonLock, LockUsage}
import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
import org.apache.carbondata.core.metadata.encoder.Encoding
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
-import org.apache.carbondata.core.util.CarbonUtil
-import org.apache.carbondata.core.util.path.CarbonTablePath
import org.apache.carbondata.events.{AlterTableDropColumnPostEvent, AlterTableDropColumnPreEvent, OperationContext, OperationListenerBus}
import org.apache.carbondata.format.SchemaEvolutionEntry
import org.apache.carbondata.spark.rdd.AlterTableDropColumnRDD
@@ -43,11 +42,11 @@ private[sql] case class CarbonAlterTableDropColumnCommand(
extends MetadataCommand {
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val tableName = alterTableDropColumnModel.tableName
val dbName = alterTableDropColumnModel.databaseName
.getOrElse(sparkSession.catalog.currentDatabase)
- LOGGER.audit(s"Alter table drop columns request has been received for $dbName.$tableName")
+ Audit.log(LOGGER, s"Alter table drop columns request has been received for $dbName.$tableName")
var locks = List.empty[ICarbonLock]
var timeStamp = 0L
val locksToBeAcquired = List(LockUsage.METADATA_LOCK, LockUsage.COMPACTION_LOCK)
@@ -162,7 +161,7 @@ private[sql] case class CarbonAlterTableDropColumnCommand(
OperationListenerBus.getInstance().fireEvent(alterTableDropColumnPostEvent, operationContext)
LOGGER.info(s"Alter table for drop columns is successful for table $dbName.$tableName")
- LOGGER.audit(s"Alter table for drop columns is successful for table $dbName.$tableName")
+ Audit.log(LOGGER, s"Alter table for drop columns is successful for table $dbName.$tableName")
} catch {
case e: Exception =>
LOGGER.error("Alter table drop columns failed : " + e.getMessage)
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
index f1e17bd..a1c68a3 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
@@ -27,12 +27,11 @@ import org.apache.spark.sql.hive.{CarbonRelation, CarbonSessionCatalog}
import org.apache.spark.util.AlterTableUtil
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.datamap.DataMapStoreManager
-import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.exception.ConcurrentOperationException
import org.apache.carbondata.core.features.TableOperation
-import org.apache.carbondata.core.locks.{ICarbonLock, LockUsage}
import org.apache.carbondata.core.metadata.CarbonTableIdentifier
import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, DataMapSchema}
import org.apache.carbondata.core.statusmanager.SegmentStatusManager
@@ -44,7 +43,7 @@ private[sql] case class CarbonAlterTableRenameCommand(
extends MetadataCommand {
override def processMetadata(sparkSession: SparkSession): Seq[Nothing] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val oldTableIdentifier = alterTableRenameModel.oldTableIdentifier
val newTableIdentifier = alterTableRenameModel.newTableIdentifier
val oldDatabaseName = oldTableIdentifier.database
@@ -61,14 +60,14 @@ private[sql] case class CarbonAlterTableRenameCommand(
}
val oldTableName = oldTableIdentifier.table.toLowerCase
val newTableName = newTableIdentifier.table.toLowerCase
- LOGGER.audit(s"Rename table request has been received for $oldDatabaseName.$oldTableName")
+ Audit.log(LOGGER, s"Rename table request has been received for $oldDatabaseName.$oldTableName")
LOGGER.info(s"Rename table request has been received for $oldDatabaseName.$oldTableName")
val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
val relation: CarbonRelation =
metastore.lookupRelation(oldTableIdentifier.database, oldTableName)(sparkSession)
.asInstanceOf[CarbonRelation]
if (relation == null) {
- LOGGER.audit(s"Rename table request has failed. " +
+ Audit.log(LOGGER, s"Rename table request has failed. " +
s"Table $oldDatabaseName.$oldTableName does not exist")
throwMetadataException(oldDatabaseName, oldTableName, "Table does not exist")
}
@@ -163,13 +162,13 @@ private[sql] case class CarbonAlterTableRenameCommand(
OperationListenerBus.getInstance().fireEvent(alterTableRenamePostEvent, operationContext)
sparkSession.catalog.refreshTable(newIdentifier.quotedString)
- LOGGER.audit(s"Table $oldTableName has been successfully renamed to $newTableName")
+ Audit.log(LOGGER, s"Table $oldTableName has been successfully renamed to $newTableName")
LOGGER.info(s"Table $oldTableName has been successfully renamed to $newTableName")
} catch {
case e: ConcurrentOperationException =>
throw e
case e: Exception =>
- LOGGER.error(e, "Rename table failed: " + e.getMessage)
+ LOGGER.error("Rename table failed: " + e.getMessage, e)
if (carbonTable != null) {
AlterTableUtil.revertRenameTableChanges(
newTableName,
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
index 19c265d..3252f1d 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
@@ -24,7 +24,9 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.command.AtomicRunnableCommand
import org.apache.spark.sql.execution.command.management.CarbonInsertIntoCommand
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.metadata.schema.table.TableInfo
/**
@@ -53,12 +55,12 @@ case class CarbonCreateTableAsSelectCommand(
databaseOpt = Some(tableInfo.getDatabaseName)
}
val dbName = CarbonEnv.getDatabaseName(databaseOpt)(sparkSession)
- LOGGER.audit(s"Request received for CTAS for $dbName.$tableName")
+ Audit.log(LOGGER, s"Request received for CTAS for $dbName.$tableName")
// check if table already exists
if (sparkSession.sessionState.catalog.listTables(dbName)
.exists(_.table.equalsIgnoreCase(tableName))) {
if (!ifNotExistsSet) {
- LOGGER.audit(
+ Audit.log(LOGGER,
s"Table creation with Database name [$dbName] and Table name [$tableName] failed. " +
s"Table [$tableName] already exists under database [$dbName]")
throw new TableAlreadyExistsException(dbName, tableName)
@@ -96,7 +98,7 @@ case class CarbonCreateTableAsSelectCommand(
val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
loadCommand.processData(sparkSession)
val carbonTable = loadCommand.relation.carbonTable
- LOGGER.audit(s"CTAS operation completed successfully for " +
+ Audit.log(LOGGER, s"CTAS operation completed successfully for " +
s"${carbonTable.getDatabaseName}.${carbonTable.getTableName}")
}
Seq.empty
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
index 42ea0bd..5d039bf 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
@@ -25,7 +25,9 @@ import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
import org.apache.spark.sql.execution.SQLExecution.EXECUTION_ID_KEY
import org.apache.spark.sql.execution.command.MetadataCommand
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.compression.CompressorFactory
import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -60,12 +62,12 @@ case class CarbonCreateTableCommand(
// set dbName and tableUnique Name in the table info
tableInfo.setDatabaseName(dbName)
tableInfo.setTableUniqueName(CarbonTable.buildUniqueName(dbName, tableName))
- LOGGER.audit(s"Creating Table with Database name [$dbName] and Table name [$tableName]")
+ Audit.log(LOGGER, s"Creating Table with Database name [$dbName] and Table name [$tableName]")
val isTransactionalTable = tableInfo.isTransactionalTable
if (sparkSession.sessionState.catalog.listTables(dbName)
.exists(_.table.equalsIgnoreCase(tableName))) {
if (!ifNotExistsSet) {
- LOGGER.audit(
+ Audit.log(LOGGER,
s"Table creation with Database name [$dbName] and Table name [$tableName] failed. " +
s"Table [$tableName] already exists under database [$dbName]")
throw new TableAlreadyExistsException(dbName, tableName)
@@ -178,15 +180,15 @@ case class CarbonCreateTableCommand(
case _: Exception => // No operation
}
val msg = s"Create table'$tableName' in database '$dbName' failed"
- LOGGER.audit(msg.concat(", ").concat(e.getMessage))
- LOGGER.error(e, msg)
+ Audit.log(LOGGER, msg.concat(", ").concat(e.getMessage))
+ LOGGER.error(msg, e)
throwMetadataException(dbName, tableName, msg.concat(", ").concat(e.getMessage))
}
}
val createTablePostExecutionEvent: CreateTablePostExecutionEvent =
CreateTablePostExecutionEvent(sparkSession, tableIdentifier)
OperationListenerBus.getInstance.fireEvent(createTablePostExecutionEvent, operationContext)
- LOGGER.audit(s"Table created with Database name [$dbName] and Table name [$tableName]")
+ Audit.log(LOGGER, s"Table created with Database name [$dbName] and Table name [$tableName]")
}
Seq.empty
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
index e4b298f..e8cb689 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
@@ -26,11 +26,11 @@ import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.execution.command.AtomicRunnableCommand
import org.apache.spark.sql.execution.command.datamap.CarbonDropDataMapCommand
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.cache.dictionary.ManageDictionaryAndBTree
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.DataMapStoreManager
-import org.apache.carbondata.core.datamap.status.DataMapStatusManager
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.exception.ConcurrentOperationException
import org.apache.carbondata.core.locks.{CarbonLockFactory, CarbonLockUtil, ICarbonLock, LockUsage}
@@ -51,7 +51,7 @@ case class CarbonDropTableCommand(
var childDropDataMapCommands : Seq[CarbonDropDataMapCommand] = Seq.empty
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val dbName = databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)
val carbonLocks: scala.collection.mutable.ListBuffer[ICarbonLock] = ListBuffer()
@@ -71,7 +71,7 @@ case class CarbonDropTableCommand(
if (SegmentStatusManager.isLoadInProgressInTable(carbonTable)) {
throw new ConcurrentOperationException(carbonTable, "loading", "drop table")
}
- LOGGER.audit(s"Deleting table [$tableName] under database [$dbName]")
+ Audit.log(LOGGER, s"Deleting table [$tableName] under database [$dbName]")
if (carbonTable.isStreamingSink) {
// streaming table should acquire streaming.lock
carbonLocks += CarbonLockUtil.getLockObject(identifier, LockUsage.STREAMING_LOCK)
@@ -142,7 +142,7 @@ case class CarbonDropTableCommand(
ifExistsSet,
sparkSession)
OperationListenerBus.getInstance.fireEvent(dropTablePostEvent, operationContext)
- LOGGER.audit(s"Deleted table [$tableName] under database [$dbName]")
+ Audit.log(LOGGER, s"Deleted table [$tableName] under database [$dbName]")
} catch {
case ex: NoSuchTableException =>
@@ -153,7 +153,7 @@ case class CarbonDropTableCommand(
throw ex
case ex: Exception =>
val msg = s"Dropping table $dbName.$tableName failed: ${ex.getMessage}"
- LOGGER.error(ex, msg)
+ LOGGER.error(msg, ex)
throwMetadataException(dbName, tableName, msg)
} finally {
if (carbonLocks.nonEmpty) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
index f9046f0..6e183b2 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
@@ -54,8 +54,7 @@ object MatchCreateDataSourceTable {
}
class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
- val LOGGER: LogService =
- LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
def apply(plan: LogicalPlan): Seq[SparkPlan] = {
plan match {
case LoadDataCommand(identifier, path, isLocal, isOverwrite, partition)
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
index 1622724..5165526 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
@@ -37,7 +37,9 @@ import org.apache.spark.sql.execution.command.stream.{CarbonCreateStreamCommand,
import org.apache.spark.sql.util.CarbonException
import org.apache.spark.util.CarbonReflectionUtils
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.spark.CarbonOption
import org.apache.carbondata.spark.util.{CarbonScalaUtil, CommonUtil}
@@ -530,7 +532,7 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
if (name.startsWith("default.value.") &&
fields.count(p => p.column.equalsIgnoreCase(colName)) == 1) {
LOGGER.error(s"Duplicate default value exist for new column: ${ colName }")
- LOGGER.audit(
+ Audit.log(LOGGER,
s"Validation failed for Create/Alter Table Operation " +
s"for ${ table }. " +
s"Duplicate default value exist for new column: ${ colName }")
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
index cab9de5..20cffa7 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
@@ -29,8 +29,10 @@ import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.hive.{CarbonRelation, CarbonSessionCatalog}
import org.apache.spark.sql.hive.HiveExternalCatalog._
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.DataMapStoreManager
import org.apache.carbondata.core.datastore.block.SegmentPropertiesAndSchemaHolder
@@ -68,7 +70,7 @@ object AlterTableUtil {
.lookupRelation(Option(dbName), tableName)(sparkSession)
.asInstanceOf[CarbonRelation]
if (relation == null) {
- LOGGER.audit(s"Alter table request has failed. " +
+ Audit.log(LOGGER, s"Alter table request has failed. " +
s"Table $dbName.$tableName does not exist")
sys.error(s"Table $dbName.$tableName does not exist")
}
@@ -292,7 +294,7 @@ object AlterTableUtil {
(sparkSession: SparkSession, catalog: CarbonSessionCatalog): Unit = {
val tableName = tableIdentifier.table
val dbName = tableIdentifier.database.getOrElse(sparkSession.catalog.currentDatabase)
- LOGGER.audit(s"Alter table newProperties request has been received for $dbName.$tableName")
+ Audit.log(LOGGER, s"Alter table newProperties request has been received for $dbName.$tableName")
val locksToBeAcquired = List(LockUsage.METADATA_LOCK, LockUsage.COMPACTION_LOCK)
var locks = List.empty[ICarbonLock]
try {
@@ -378,10 +380,10 @@ object AlterTableUtil {
propKeys,
set)
LOGGER.info(s"Alter table newProperties is successful for table $dbName.$tableName")
- LOGGER.audit(s"Alter table newProperties is successful for table $dbName.$tableName")
+ Audit.log(LOGGER, s"Alter table newProperties is successful for table $dbName.$tableName")
} catch {
case e: Exception =>
- LOGGER.error(e, "Alter table newProperties failed")
+ LOGGER.error("Alter table newProperties failed", e)
sys.error(s"Alter table newProperties operation failed: ${e.getMessage}")
} finally {
// release lock after command execution completion
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/datamap/DataMapWriterListener.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datamap/DataMapWriterListener.java b/processing/src/main/java/org/apache/carbondata/processing/datamap/DataMapWriterListener.java
index 55a251d..e88c422 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datamap/DataMapWriterListener.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datamap/DataMapWriterListener.java
@@ -24,7 +24,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.DataMapMeta;
import org.apache.carbondata.core.datamap.DataMapStoreManager;
@@ -39,12 +38,14 @@ import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
import org.apache.carbondata.processing.store.TablePage;
+import org.apache.log4j.Logger;
+
/**
* It is for writing DataMap for one table
*/
public class DataMapWriterListener {
- private static final LogService LOG = LogServiceFactory.getLogService(
+ private static final Logger LOG = LogServiceFactory.getLogService(
DataMapWriterListener.class.getCanonicalName());
// list indexed column -> list of data map writer
@@ -68,7 +69,7 @@ public class DataMapWriterListener {
try {
tableIndices = DataMapStoreManager.getInstance().getAllDataMap(carbonTable);
} catch (IOException e) {
- LOG.error(e, "Error while retrieving datamaps");
+ LOG.error("Error while retrieving datamaps", e);
throw new RuntimeException(e);
}
if (tableIndices != null) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java b/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
index 4e2b871..c12ed1c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
@@ -27,7 +27,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
@@ -61,12 +60,14 @@ import org.apache.carbondata.processing.loading.dictionary.PreCreatedDictionary;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* Primitive DataType stateless object used in data loading
*/
public class PrimitiveDataType implements GenericDataType<Object> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(PrimitiveDataType.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java b/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
index 69f79f8..b93f21d 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
@@ -21,12 +21,13 @@ import java.io.IOException;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.processing.datamap.DataMapWriterListener;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
import org.apache.carbondata.processing.loading.row.CarbonRowBatch;
+import org.apache.log4j.Logger;
+
/**
* This base abstract class for data loading.
* It can do transformation jobs as per the implementation.
@@ -38,7 +39,7 @@ import org.apache.carbondata.processing.loading.row.CarbonRowBatch;
*/
public abstract class AbstractDataLoadProcessorStep {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(AbstractDataLoadProcessorStep.class.getName());
protected CarbonDataLoadConfiguration configuration;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
index 58ec0d5..e0dcd26 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
@@ -27,7 +27,6 @@ import java.nio.charset.Charset;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -35,12 +34,14 @@ import org.apache.carbondata.core.datastore.impl.FileFactory.FileType;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
+import org.apache.log4j.Logger;
+
public class BadRecordsLogger {
/**
* Comment for <code>LOGGER</code>
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(BadRecordsLogger.class.getName());
/**
* Which holds the key and if any bad rec found to check from API to update
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadExecutor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadExecutor.java b/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadExecutor.java
index fc5c41f..6550afe 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadExecutor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadExecutor.java
@@ -18,7 +18,6 @@
package org.apache.carbondata.processing.loading;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
import org.apache.carbondata.processing.loading.exception.BadRecordFoundException;
@@ -27,12 +26,14 @@ import org.apache.carbondata.processing.loading.exception.NoRetryException;
import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
import org.apache.carbondata.processing.util.CarbonBadRecordUtil;
+import org.apache.log4j.Logger;
+
/**
* It executes the data load.
*/
public class DataLoadExecutor {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DataLoadExecutor.class.getName());
private AbstractDataLoadProcessorStep loadProcessorStep;
@@ -62,7 +63,7 @@ public class DataLoadExecutor {
throw e;
}
} catch (Exception e) {
- LOGGER.error(e, "Data Loading failed for table " + loadModel.getTableName());
+ LOGGER.error("Data Loading failed for table " + loadModel.getTableName(), e);
throw new CarbonDataLoadingException(
"Data Loading failed for table " + loadModel.getTableName(), e);
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
index f89bc2f..f89a4e7 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
@@ -51,12 +50,13 @@ import org.apache.carbondata.processing.loading.steps.SortProcessorStepImpl;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
/**
* It builds the pipe line of steps for loading data to carbon.
*/
public final class DataLoadProcessBuilder {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DataLoadProcessBuilder.class.getName());
public AbstractDataLoadProcessorStep build(CarbonLoadModel loadModel, String[] storeLocation,
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/TableProcessingOperations.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/TableProcessingOperations.java b/processing/src/main/java/org/apache/carbondata/processing/loading/TableProcessingOperations.java
index 5bed8b1..89da224 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/TableProcessingOperations.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/TableProcessingOperations.java
@@ -23,7 +23,6 @@ import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
@@ -40,9 +39,10 @@ import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
import org.apache.carbondata.processing.util.CarbonLoaderUtil;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
public class TableProcessingOperations {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonLoaderUtil.class.getName());
/**
@@ -136,7 +136,7 @@ public class TableProcessingOperations {
try {
CarbonUtil.deleteFoldersAndFiles(new File(loc));
} catch (IOException | InterruptedException e) {
- LOGGER.error(e, "Failed to delete local data load folder location: " + loc);
+ LOGGER.error("Failed to delete local data load folder location: " + loc, e);
}
}
LOGGER.info(
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java
index 212037b..41d171b 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/MeasureFieldConverterImpl.java
@@ -16,7 +16,6 @@
*/
package org.apache.carbondata.processing.loading.converter.impl;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.row.CarbonRow;
@@ -28,12 +27,14 @@ import org.apache.carbondata.processing.loading.converter.FieldConverter;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* Converter for measure
*/
public class MeasureFieldConverterImpl implements FieldConverter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(MeasureFieldConverterImpl.class.getName());
private int index;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java
index 2d4e167..458b3ab 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/converter/impl/RowConverterImpl.java
@@ -27,7 +27,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.row.CarbonRow;
import org.apache.carbondata.core.dictionary.client.DictionaryClient;
@@ -44,13 +43,15 @@ import org.apache.carbondata.processing.loading.converter.RowConverter;
import org.apache.carbondata.processing.loading.exception.BadRecordFoundException;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
+import org.apache.log4j.Logger;
+
/**
* It converts the complete row if necessary, dictionary columns are encoded with dictionary values
* and nondictionary values are converted to binary.
*/
public class RowConverterImpl implements RowConverter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(RowConverterImpl.class.getName());
private CarbonDataLoadConfiguration configuration;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormat.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormat.java b/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormat.java
index 86c71a6..f01aea8 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormat.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVInputFormat.java
@@ -22,7 +22,6 @@ import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.Charset;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;
@@ -52,6 +51,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.util.LineReader;
+import org.apache.log4j.Logger;
/**
* An {@link org.apache.hadoop.mapreduce.InputFormat} for csv files. Files are broken into lines.
@@ -77,7 +77,7 @@ public class CSVInputFormat extends FileInputFormat<NullWritable, StringArrayWri
public static final int DEFAULT_MAX_NUMBER_OF_COLUMNS_FOR_PARSING = 2000;
public static final int THRESHOLD_MAX_NUMBER_OF_COLUMNS_FOR_PARSING = 20000;
- private static LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CSVInputFormat.class.toString());
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVRecordReaderIterator.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVRecordReaderIterator.java b/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVRecordReaderIterator.java
index 24ef9c1..d963502 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVRecordReaderIterator.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/csvinput/CSVRecordReaderIterator.java
@@ -19,13 +19,11 @@ package org.apache.carbondata.processing.loading.csvinput;
import java.io.IOException;
-
import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
import com.univocity.parsers.common.TextParsingException;
-
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
index ddd54a4..7fecb12 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
@@ -29,7 +29,6 @@ import org.apache.carbondata.common.Strings;
import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.constants.LoggerAction;
import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.compression.CompressorFactory;
@@ -45,13 +44,14 @@ import org.apache.carbondata.processing.util.TableOptionConstant;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* Builder for {@link CarbonLoadModel}
*/
@InterfaceAudience.Internal
public class CarbonLoadModelBuilder {
- private static final LogService LOGGER = LogServiceFactory.getLogService(
+ private static final Logger LOGGER = LogServiceFactory.getLogService(
CarbonLoadModelBuilder.class.getName());
private CarbonTable table;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
index 98cd90d..a1dee27 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
@@ -27,7 +27,6 @@ import java.util.Map;
import org.apache.carbondata.common.Maps;
import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
@@ -39,6 +38,7 @@ import org.apache.carbondata.processing.util.CarbonLoaderUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* Provide utilities to populate loading options
@@ -46,7 +46,7 @@ import org.apache.hadoop.conf.Configuration;
@InterfaceAudience.Internal
public class LoadOption {
- private static LogService LOG = LogServiceFactory.getLogService(LoadOption.class.getName());
+ private static final Logger LOG = LogServiceFactory.getLogService(LoadOption.class.getName());
/**
* Based on the input options, fill and return data loading options with default value
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/partition/impl/RangePartitionerImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/partition/impl/RangePartitionerImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/partition/impl/RangePartitionerImpl.java
index d59ad02..fd4e037 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/partition/impl/RangePartitionerImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/partition/impl/RangePartitionerImpl.java
@@ -21,14 +21,15 @@ import java.util.Arrays;
import java.util.Comparator;
import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.row.CarbonRow;
import org.apache.carbondata.processing.loading.partition.Partitioner;
+import org.apache.log4j.Logger;
+
@InterfaceAudience.Internal
public class RangePartitionerImpl implements Partitioner<CarbonRow> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(RangePartitionerImpl.class.getName());
private CarbonRow[] rangeBounds;
private Comparator<CarbonRow> comparator;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java
index b74b393..aad9083 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java
@@ -19,7 +19,6 @@ package org.apache.carbondata.processing.loading.sort;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;
@@ -31,9 +30,11 @@ import org.apache.carbondata.processing.loading.sort.impl.UnsafeParallelReadMerg
import org.apache.carbondata.processing.loading.sort.impl.UnsafeParallelReadMergeSorterWithColumnRangeImpl;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
public class SorterFactory {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SorterFactory.class.getName());
public static Sorter createSorter(CarbonDataLoadConfiguration configuration, AtomicLong counter) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterImpl.java
index 74e1594..f0920ee 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterImpl.java
@@ -24,7 +24,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
@@ -42,6 +41,8 @@ import org.apache.carbondata.processing.sort.sortdata.SortIntermediateFileMerger
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* It parallely reads data from array of iterates and do merge sort.
* First it sorts the data and write to temp files. These temp files will be merge sorted to get
@@ -49,7 +50,7 @@ import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
*/
public class ParallelReadMergeSorterImpl extends AbstractMergeSorter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ParallelReadMergeSorterImpl.class.getName());
private SortParameters sortParameters;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterWithColumnRangeImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterWithColumnRangeImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterWithColumnRangeImpl.java
index 5419e05..3b767aa 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterWithColumnRangeImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ParallelReadMergeSorterWithColumnRangeImpl.java
@@ -26,7 +26,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
@@ -44,6 +43,8 @@ import org.apache.carbondata.processing.sort.sortdata.SortIntermediateFileMerger
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* It parallely reads data from array of iterates and do merge sort.
* First it sorts the data and write to temp files. These temp files will be merge sorted to get
@@ -52,7 +53,7 @@ import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
* bucketing,sort_column_bounds, it sorts each range of data separately and write to temp files.
*/
public class ParallelReadMergeSorterWithColumnRangeImpl extends AbstractMergeSorter {
- private static final LogService LOGGER = LogServiceFactory.getLogService(
+ private static final Logger LOGGER = LogServiceFactory.getLogService(
ParallelReadMergeSorterWithColumnRangeImpl.class.getName());
private SortParameters originSortParameters;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
index 1b1d383..9cb67df 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
@@ -28,7 +28,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
@@ -47,13 +46,15 @@ import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByEx
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* It parallely reads data from array of iterates and do merge sort.
* It sorts data in batches and send to the next step.
*/
public class UnsafeBatchParallelReadMergeSorterImpl extends AbstractMergeSorter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeBatchParallelReadMergeSorterImpl.class.getName());
private SortParameters sortParameters;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterImpl.java
index afa30c0..6e11ca6 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterImpl.java
@@ -24,7 +24,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
import org.apache.carbondata.core.datastore.row.CarbonRow;
@@ -41,6 +40,8 @@ import org.apache.carbondata.processing.loading.sort.unsafe.merger.UnsafeSingleT
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
+import org.apache.log4j.Logger;
+
/**
* It parallely reads data from array of iterates and do merge sort.
* First it sorts the data and write to temp files. These temp files will be merge sorted to get
@@ -48,7 +49,7 @@ import org.apache.carbondata.processing.sort.sortdata.SortParameters;
*/
public class UnsafeParallelReadMergeSorterImpl extends AbstractMergeSorter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeParallelReadMergeSorterImpl.class.getName());
private SortParameters sortParameters;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterWithColumnRangeImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterWithColumnRangeImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterWithColumnRangeImpl.java
index 5766105..a8ec05c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterWithColumnRangeImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeParallelReadMergeSorterWithColumnRangeImpl.java
@@ -27,7 +27,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.row.CarbonRow;
@@ -45,6 +44,7 @@ import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
/**
* It parallely reads data from array of iterates and do merge sort.
@@ -55,7 +55,7 @@ import org.apache.commons.lang3.StringUtils;
*/
public class UnsafeParallelReadMergeSorterWithColumnRangeImpl extends AbstractMergeSorter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(
UnsafeParallelReadMergeSorterWithColumnRangeImpl.class.getName());
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
index 9c23dde..e199c89 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
@@ -27,7 +27,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -53,11 +52,13 @@ import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.sort.sortdata.TableFieldStat;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
public class UnsafeSortDataRows {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeSortDataRows.class.getName());
/**
* threadStatusObserver
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeFinalMergePageHolder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeFinalMergePageHolder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeFinalMergePageHolder.java
index b805d37..896af60 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeFinalMergePageHolder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeFinalMergePageHolder.java
@@ -17,7 +17,6 @@
package org.apache.carbondata.processing.loading.sort.unsafe.holder;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
@@ -25,9 +24,11 @@ import org.apache.carbondata.processing.loading.sort.unsafe.UnsafeCarbonRowPage;
import org.apache.carbondata.processing.loading.sort.unsafe.merger.UnsafeInMemoryIntermediateDataMerger;
import org.apache.carbondata.processing.sort.sortdata.IntermediateSortTempRowComparator;
+import org.apache.log4j.Logger;
+
public class UnsafeFinalMergePageHolder implements SortTempChunkHolder {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeFinalMergePageHolder.class.getName());
private int counter;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryHolder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryHolder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryHolder.java
index baa9e71..e5680de 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryHolder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryHolder.java
@@ -17,15 +17,16 @@
package org.apache.carbondata.processing.loading.sort.unsafe.holder;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
import org.apache.carbondata.processing.loading.sort.unsafe.UnsafeCarbonRowPage;
import org.apache.carbondata.processing.sort.sortdata.IntermediateSortTempRowComparator;
+import org.apache.log4j.Logger;
+
public class UnsafeInmemoryHolder implements SortTempChunkHolder {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeInmemoryHolder.class.getName());
private int counter;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryMergeHolder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryMergeHolder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryMergeHolder.java
index f8689d9..b47a5d5 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryMergeHolder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeInmemoryMergeHolder.java
@@ -16,17 +16,18 @@
*/
package org.apache.carbondata.processing.loading.sort.unsafe.holder;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.processing.loading.sort.unsafe.UnsafeCarbonRowPage;
import org.apache.carbondata.processing.loading.sort.unsafe.comparator.UnsafeRowComparator;
+import org.apache.log4j.Logger;
+
/**
* It is used for merging unsafe inmemory intermediate data
*/
public class UnsafeInmemoryMergeHolder implements Comparable<UnsafeInmemoryMergeHolder> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeInmemoryMergeHolder.class.getName());
private int counter;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeSortTempFileChunkHolder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeSortTempFileChunkHolder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeSortTempFileChunkHolder.java
index a991d4c..4a97b20 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeSortTempFileChunkHolder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/holder/UnsafeSortTempFileChunkHolder.java
@@ -27,7 +27,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -40,12 +39,14 @@ import org.apache.carbondata.processing.sort.sortdata.IntermediateSortTempRowCom
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.sort.sortdata.TableFieldStat;
+import org.apache.log4j.Logger;
+
public class UnsafeSortTempFileChunkHolder implements SortTempChunkHolder {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeSortTempFileChunkHolder.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
index a65de16..8a4503b 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
@@ -26,7 +26,6 @@ import java.util.PriorityQueue;
import java.util.Random;
import java.util.concurrent.Callable;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -39,11 +38,13 @@ import org.apache.carbondata.processing.loading.sort.unsafe.holder.UnsafeInmemor
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
+import org.apache.log4j.Logger;
+
public class UnsafeInMemoryIntermediateDataMerger implements Callable<Void> {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeInMemoryIntermediateDataMerger.class.getName());
/**
@@ -117,7 +118,7 @@ public class UnsafeInMemoryIntermediateDataMerger implements Callable<Void> {
+ ", containing rows: " + totalSize);
}
} catch (Exception e) {
- LOGGER.error(e, "Problem while intermediate merging");
+ LOGGER.error("Problem while intermediate merging", e);
throw e;
} finally {
if (spillDisk) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
index 0a12eda..041544b 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
@@ -26,7 +26,6 @@ import java.util.NoSuchElementException;
import java.util.PriorityQueue;
import java.util.concurrent.Callable;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.util.CarbonUtil;
@@ -38,11 +37,13 @@ import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByEx
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.sort.sortdata.TableFieldStat;
+import org.apache.log4j.Logger;
+
public class UnsafeIntermediateFileMerger implements Callable<Void> {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeIntermediateFileMerger.class.getName());
/**
@@ -105,7 +106,7 @@ public class UnsafeIntermediateFileMerger implements Callable<Void> {
LOGGER.info("============================== Intermediate Merge of " + fileConterConst
+ " Sort Temp Files Cost Time: " + intermediateMergeCostTime + "(s)");
} catch (Exception e) {
- LOGGER.error(e, "Problem while intermediate merging");
+ LOGGER.error("Problem while intermediate merging", e);
clear();
throwable = e;
} finally {
@@ -114,7 +115,7 @@ public class UnsafeIntermediateFileMerger implements Callable<Void> {
try {
finish();
} catch (CarbonSortKeyAndGroupByException e) {
- LOGGER.error(e, "Problem while deleting the merge file");
+ LOGGER.error("Problem while deleting the merge file", e);
throwable = e;
}
} else {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
index 18f35d3..f972f0c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
@@ -27,7 +27,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.memory.UnsafeSortMemoryManager;
@@ -37,12 +36,14 @@ import org.apache.carbondata.processing.loading.sort.unsafe.UnsafeCarbonRowPage;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
+import org.apache.log4j.Logger;
+
/**
* It does mergesort intermediate files to big file.
*/
public class UnsafeIntermediateMerger {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeIntermediateMerger.class.getName());
/**
@@ -211,7 +212,7 @@ public class UnsafeIntermediateMerger {
try {
mergerTask.get(i).get();
} catch (InterruptedException | ExecutionException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonSortKeyAndGroupByException(e.getMessage(), e);
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
index 2dd2f31..7e36389 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
@@ -28,7 +28,6 @@ import java.util.NoSuchElementException;
import java.util.PriorityQueue;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
@@ -40,11 +39,13 @@ import org.apache.carbondata.processing.loading.sort.unsafe.holder.UnsafeInmemor
import org.apache.carbondata.processing.loading.sort.unsafe.holder.UnsafeSortTempFileChunkHolder;
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
+import org.apache.log4j.Logger;
+
public class UnsafeSingleThreadFinalSortFilesMerger extends CarbonIterator<Object[]> {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeSingleThreadFinalSortFilesMerger.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
index 7683bbc..ae42df7 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
@@ -25,7 +25,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
import org.apache.carbondata.core.datastore.row.CarbonRow;
@@ -49,13 +48,15 @@ import org.apache.carbondata.processing.store.CarbonFactHandler;
import org.apache.carbondata.processing.store.CarbonFactHandlerFactory;
import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
+import org.apache.log4j.Logger;
+
/**
* It reads data from sorted files which are generated in previous sort step.
* And it writes data to carbondata file. It also generates mdk key while writing to carbondata file
*/
public class CarbonRowDataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonRowDataWriterProcessorStepImpl.class.getName());
private int dimensionWithComplexCount;
@@ -146,11 +147,11 @@ public class CarbonRowDataWriterProcessorStepImpl extends AbstractDataLoadProces
}
}
} catch (CarbonDataWriterException e) {
- LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterProcessorStepImpl");
+ LOGGER.error("Failed for table: " + tableName + " in DataWriterProcessorStepImpl", e);
throw new CarbonDataLoadingException(
"Error while initializing data handler : " + e.getMessage());
} catch (Exception e) {
- LOGGER.error(e, "Failed for table: " + tableName + " in DataWriterProcessorStepImpl");
+ LOGGER.error("Failed for table: " + tableName + " in DataWriterProcessorStepImpl", e);
if (e instanceof BadRecordFoundException) {
throw new BadRecordFoundException(e.getMessage(), e);
}
@@ -198,7 +199,7 @@ public class CarbonRowDataWriterProcessorStepImpl extends AbstractDataLoadProces
} catch (Exception e) {
// if throw exception from here dataHandler will not be closed.
// so just holding exception and later throwing exception
- LOGGER.error(e, "Failed for table: " + tableName + " in finishing data handler");
+ LOGGER.error("Failed for table: " + tableName + " in finishing data handler", e);
exception = new CarbonDataWriterException(
"Failed for table: " + tableName + " in finishing data handler", e);
}
@@ -229,10 +230,10 @@ public class CarbonRowDataWriterProcessorStepImpl extends AbstractDataLoadProces
try {
dataHandler.closeHandler();
} catch (CarbonDataWriterException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonDataLoadingException(e.getMessage());
} catch (Exception e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage());
}
}
[6/6] carbondata git commit: [CARBONDATA-3024] Refactor to use log4j
Logger directly
Posted by xu...@apache.org.
[CARBONDATA-3024] Refactor to use log4j Logger directly
Currently CarbonData's log is printing the line number in StandardLogService, which is not good for maintainability; a better way is to use the log4j Logger directly so that it will print the line number of where we are logging.
This closes #2827
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/06adb5a0
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/06adb5a0
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/06adb5a0
Branch: refs/heads/master
Commit: 06adb5a0376255c15f8c393257d6db0736e35f31
Parents: 15d3826
Author: Jacky Li <ja...@qq.com>
Authored: Wed Oct 17 20:27:15 2018 +0800
Committer: xuchuanyin <xu...@hust.edu.cn>
Committed: Thu Oct 18 09:56:12 2018 +0800
----------------------------------------------------------------------
.../carbondata/common/logging/LogService.java | 80 ++++++-
.../common/logging/LogServiceFactory.java | 6 +-
.../carbondata/common/logging/impl/Audit.java | 49 ++++
.../common/logging/impl/StandardLogService.java | 236 -------------------
.../logging/LogServiceFactoryTest_UT.java | 7 +-
.../logging/ft/LoggingServiceTest_FT.java | 9 +-
.../logging/impl/StandardLogServiceTest_UT.java | 140 -----------
.../carbondata/core/cache/CacheProvider.java | 5 +-
.../carbondata/core/cache/CarbonLRUCache.java | 5 +-
.../dictionary/ForwardDictionaryCache.java | 5 +-
.../dictionary/ManageDictionaryAndBTree.java | 5 +-
.../dictionary/ReverseDictionaryCache.java | 5 +-
.../core/constants/CarbonVersionConstants.java | 9 +-
.../core/datamap/DataMapStoreManager.java | 10 +-
.../carbondata/core/datamap/DataMapUtil.java | 4 +-
.../status/DiskBasedDataMapStatusProvider.java | 17 +-
.../block/SegmentPropertiesAndSchemaHolder.java | 5 +-
.../blocklet/BlockletEncodedColumnPage.java | 5 +-
.../datastore/compression/SnappyCompressor.java | 32 +--
.../filesystem/AbstractDFSCarbonFile.java | 4 +-
.../datastore/filesystem/AlluxioCarbonFile.java | 6 +-
.../datastore/filesystem/HDFSCarbonFile.java | 4 +-
.../datastore/filesystem/LocalCarbonFile.java | 7 +-
.../core/datastore/filesystem/S3CarbonFile.java | 4 +-
.../datastore/filesystem/ViewFSCarbonFile.java | 4 +-
.../core/datastore/impl/FileFactory.java | 4 +-
.../datastore/page/LocalDictColumnPage.java | 9 +-
.../page/encoding/ColumnPageEncoder.java | 5 +-
.../client/NonSecureDictionaryClient.java | 7 +-
.../NonSecureDictionaryClientHandler.java | 13 +-
.../IncrementalColumnDictionaryGenerator.java | 7 +-
.../generator/TableDictionaryGenerator.java | 5 +-
.../server/NonSecureDictionaryServer.java | 6 +-
.../NonSecureDictionaryServerHandler.java | 6 +-
.../service/AbstractDictionaryServer.java | 5 +-
.../AtomicFileOperationsImpl.java | 5 +-
.../indexstore/BlockletDataMapIndexStore.java | 4 +-
.../core/indexstore/BlockletDetailInfo.java | 8 +-
.../indexstore/blockletindex/BlockDataMap.java | 13 +-
.../blockletindex/SegmentIndexFileStore.java | 4 +-
.../DateDirectDictionaryGenerator.java | 5 +-
.../TimeStampDirectDictionaryGenerator.java | 5 +-
.../core/locks/CarbonLockFactory.java | 5 +-
.../carbondata/core/locks/CarbonLockUtil.java | 4 +-
.../carbondata/core/locks/HdfsFileLock.java | 5 +-
.../carbondata/core/locks/LocalFileLock.java | 5 +-
.../carbondata/core/locks/S3FileLock.java | 7 +-
.../carbondata/core/locks/ZooKeeperLocking.java | 10 +-
.../carbondata/core/locks/ZookeeperInit.java | 4 +-
.../core/memory/IntPointerBuffer.java | 5 +-
.../core/memory/UnsafeMemoryManager.java | 5 +-
.../core/memory/UnsafeSortMemoryManager.java | 5 +-
.../core/metadata/SegmentFileStore.java | 4 +-
.../core/metadata/schema/table/CarbonTable.java | 4 +-
.../core/metadata/schema/table/TableInfo.java | 5 +-
.../core/mutate/CarbonUpdateUtil.java | 4 +-
.../core/mutate/DeleteDeltaBlockDetails.java | 5 +-
.../core/mutate/SegmentUpdateDetails.java | 5 +-
.../reader/CarbonDeleteDeltaFileReaderImpl.java | 8 -
.../reader/CarbonDeleteFilesDataReader.java | 4 +-
.../CarbonDictionarySortIndexReaderImpl.java | 6 +-
.../scan/collector/ResultCollectorFactory.java | 16 +-
.../RestructureBasedRawResultCollector.java | 8 -
.../executor/impl/AbstractQueryExecutor.java | 8 +-
.../impl/SearchModeDetailQueryExecutor.java | 4 +-
.../SearchModeVectorDetailQueryExecutor.java | 4 +-
.../expression/RangeExpressionEvaluator.java | 5 +-
.../scan/filter/FilterExpressionProcessor.java | 5 +-
.../carbondata/core/scan/filter/FilterUtil.java | 6 +-
.../executer/RowLevelFilterExecuterImpl.java | 5 +-
.../core/scan/model/QueryModelBuilder.java | 5 +-
.../core/scan/result/BlockletScannedResult.java | 5 +-
.../AbstractDetailQueryResultIterator.java | 5 +-
.../scan/result/iterator/RawResultIterator.java | 5 +-
.../DriverQueryStatisticsRecorderImpl.java | 7 +-
.../core/stats/QueryStatisticsRecorderImpl.java | 11 +-
.../core/statusmanager/LoadMetadataDetails.java | 5 +-
.../statusmanager/SegmentStatusManager.java | 25 +-
.../SegmentUpdateStatusManager.java | 4 +-
.../core/util/CarbonLoadStatisticsImpl.java | 5 +-
.../core/util/CarbonMetadataUtil.java | 5 +-
.../carbondata/core/util/CarbonProperties.java | 5 +-
.../apache/carbondata/core/util/CarbonUtil.java | 6 +-
.../carbondata/core/util/DataTypeUtil.java | 5 +-
.../carbondata/core/util/DeleteLoadFolders.java | 5 +-
.../core/util/ObjectSizeCalculator.java | 7 +-
.../carbondata/core/util/SessionParams.java | 9 +-
.../carbondata/core/util/TaskMetricsMap.java | 4 +-
.../core/util/path/HDFSLeaseUtils.java | 8 +-
.../writer/CarbonDeleteDeltaWriterImpl.java | 4 +-
.../core/writer/CarbonDictionaryWriterImpl.java | 6 +-
.../CarbonDictionarySortIndexWriterImpl.java | 9 +-
.../filesystem/HDFSCarbonFileTest.java | 18 +-
.../core/load/LoadMetadataDetailsUnitTest.java | 13 +-
.../bloom/AbstractBloomDataMapWriter.java | 4 -
.../datamap/bloom/BloomCoarseGrainDataMap.java | 18 +-
.../bloom/BloomCoarseGrainDataMapFactory.java | 11 +-
.../datamap/bloom/BloomDataMapCache.java | 4 -
.../datamap/bloom/BloomIndexFileStore.java | 20 +-
.../datamap/examples/MinMaxDataWriter.java | 4 +-
.../datamap/examples/MinMaxIndexDataMap.java | 4 +-
.../examples/MinMaxIndexDataMapFactory.java | 4 +-
.../datamap/lucene/LuceneDataMapBuilder.java | 4 +-
.../lucene/LuceneDataMapFactoryBase.java | 6 +-
.../datamap/lucene/LuceneDataMapWriter.java | 4 +-
.../datamap/lucene/LuceneFineGrainDataMap.java | 6 +-
.../carbondata/mv/datamap/MVAnalyzerRule.scala | 3 +-
.../carbondata/examples/sdk/SDKS3Example.java | 10 +-
.../examples/sdk/SDKS3ReadExample.java | 6 +-
.../hadoop/api/CarbonOutputCommitter.java | 4 +-
.../hadoop/testutil/StoreCreator.java | 4 +-
.../hadoop/util/CarbonInputFormatUtil.java | 4 +-
.../hive/MapredCarbonInputFormat.java | 5 +-
.../carbondata/presto/CarbondataPageSource.java | 11 +-
.../presto/impl/CarbonTableReader.java | 12 +-
.../blockprune/BlockPruneQueryTestCase.scala | 6 +-
.../CarbonCustomBlockDistributionTest.scala | 6 +-
.../client/SecureDictionaryClient.java | 9 +-
.../client/SecureDictionaryClientHandler.java | 6 +-
.../server/SecureDictionaryServer.java | 24 +-
.../server/SecureDictionaryServerHandler.java | 4 +-
.../org/apache/carbondata/api/CarbonStore.scala | 13 +-
.../load/DataLoadProcessorStepOnSpark.scala | 8 +-
.../spark/rdd/AlterTableDropColumnRDD.scala | 2 +-
.../carbondata/spark/rdd/CarbonMergerRDD.scala | 2 +-
.../spark/rdd/NewCarbonDataLoadRDD.scala | 10 +-
.../carbondata/spark/rdd/PartitionDropper.scala | 8 +-
.../spark/rdd/PartitionSplitter.scala | 6 +-
.../carbondata/spark/rdd/StreamHandoffRDD.scala | 8 +-
.../carbondata/spark/util/CarbonScalaUtil.scala | 3 +-
.../spark/util/GlobalDictionaryUtil.scala | 6 +-
.../command/carbonTableSchemaCommon.scala | 8 +-
.../streaming/CarbonAppendableStreamSink.scala | 2 +-
.../sql/test/ResourceRegisterAndCopier.scala | 4 +-
.../vectorreader/ColumnarVectorWrapper.java | 1 -
.../VectorizedCarbonRecordReader.java | 4 +-
.../datamap/IndexDataMapRebuildRDD.scala | 4 +-
.../spark/rdd/AggregateDataMapCompactor.scala | 6 +-
.../spark/rdd/CarbonDataRDDFactory.scala | 34 +--
.../spark/rdd/CarbonTableCompactor.scala | 12 +-
.../apache/carbondata/spark/rdd/Compactor.scala | 2 +-
.../carbondata/stream/StreamJobManager.scala | 7 +-
.../org/apache/spark/sql/CarbonSession.scala | 2 +-
.../events/MergeBloomIndexEventListener.scala | 7 +-
.../sql/events/MergeIndexEventListener.scala | 17 +-
.../datamap/CarbonCreateDataMapCommand.scala | 4 +-
.../datamap/CarbonDropDataMapCommand.scala | 9 +-
.../CarbonAlterTableCompactionCommand.scala | 15 +-
.../CarbonAlterTableFinishStreaming.scala | 2 +-
.../management/CarbonCleanFilesCommand.scala | 2 +-
.../management/CarbonInsertIntoCommand.scala | 2 +-
.../management/CarbonLoadDataCommand.scala | 29 +--
.../management/RefreshCarbonTableCommand.scala | 14 +-
.../CarbonProjectForDeleteCommand.scala | 6 +-
.../CarbonProjectForUpdateCommand.scala | 2 +-
.../command/mutation/DeleteExecution.scala | 19 +-
.../command/mutation/HorizontalCompaction.scala | 17 +-
.../CarbonAlterTableDropPartitionCommand.scala | 21 +-
.../CarbonAlterTableSplitPartitionCommand.scala | 18 +-
.../preaaggregate/PreAggregateListeners.scala | 4 +-
.../preaaggregate/PreAggregateUtil.scala | 5 +-
.../CarbonAlterTableAddColumnCommand.scala | 11 +-
.../CarbonAlterTableDataTypeChangeCommand.scala | 15 +-
.../CarbonAlterTableDropColumnCommand.scala | 11 +-
.../schema/CarbonAlterTableRenameCommand.scala | 15 +-
.../CarbonCreateTableAsSelectCommand.scala | 8 +-
.../table/CarbonCreateTableCommand.scala | 12 +-
.../command/table/CarbonDropTableCommand.scala | 12 +-
.../sql/execution/strategy/DDLStrategy.scala | 3 +-
.../sql/parser/CarbonSpark2SqlParser.scala | 4 +-
.../org/apache/spark/util/AlterTableUtil.scala | 10 +-
.../datamap/DataMapWriterListener.java | 7 +-
.../processing/datatypes/PrimitiveDataType.java | 5 +-
.../loading/AbstractDataLoadProcessorStep.java | 5 +-
.../processing/loading/BadRecordsLogger.java | 5 +-
.../processing/loading/DataLoadExecutor.java | 7 +-
.../loading/DataLoadProcessBuilder.java | 4 +-
.../loading/TableProcessingOperations.java | 6 +-
.../impl/MeasureFieldConverterImpl.java | 5 +-
.../converter/impl/RowConverterImpl.java | 5 +-
.../loading/csvinput/CSVInputFormat.java | 4 +-
.../csvinput/CSVRecordReaderIterator.java | 2 -
.../loading/model/CarbonLoadModelBuilder.java | 4 +-
.../processing/loading/model/LoadOption.java | 4 +-
.../partition/impl/RangePartitionerImpl.java | 5 +-
.../processing/loading/sort/SorterFactory.java | 5 +-
.../sort/impl/ParallelReadMergeSorterImpl.java | 5 +-
...allelReadMergeSorterWithColumnRangeImpl.java | 5 +-
.../UnsafeBatchParallelReadMergeSorterImpl.java | 5 +-
.../impl/UnsafeParallelReadMergeSorterImpl.java | 5 +-
...allelReadMergeSorterWithColumnRangeImpl.java | 4 +-
.../loading/sort/unsafe/UnsafeSortDataRows.java | 5 +-
.../holder/UnsafeFinalMergePageHolder.java | 5 +-
.../unsafe/holder/UnsafeInmemoryHolder.java | 5 +-
.../holder/UnsafeInmemoryMergeHolder.java | 5 +-
.../holder/UnsafeSortTempFileChunkHolder.java | 5 +-
.../UnsafeInMemoryIntermediateDataMerger.java | 7 +-
.../merger/UnsafeIntermediateFileMerger.java | 9 +-
.../unsafe/merger/UnsafeIntermediateMerger.java | 7 +-
.../UnsafeSingleThreadFinalSortFilesMerger.java | 5 +-
.../CarbonRowDataWriterProcessorStepImpl.java | 15 +-
.../steps/DataWriterBatchProcessorStepImpl.java | 9 +-
.../steps/DataWriterProcessorStepImpl.java | 17 +-
.../merger/CarbonCompactionExecutor.java | 6 +-
.../processing/merger/CarbonCompactionUtil.java | 4 +-
.../processing/merger/CarbonDataMergerUtil.java | 17 +-
.../merger/CompactionResultSortProcessor.java | 7 +-
.../merger/RowResultMergerProcessor.java | 5 +-
.../spliter/AbstractCarbonQueryExecutor.java | 6 +-
.../partition/spliter/CarbonSplitExecutor.java | 4 +-
.../partition/spliter/RowResultProcessor.java | 7 +-
.../sort/sortdata/IntermediateFileMerger.java | 9 +-
.../SingleThreadFinalSortFilesMerger.java | 5 +-
.../processing/sort/sortdata/SortDataRows.java | 7 +-
.../sortdata/SortIntermediateFileMerger.java | 7 +-
.../sort/sortdata/SortParameters.java | 4 +-
.../sort/sortdata/SortTempFileChunkHolder.java | 5 +-
.../store/CarbonFactDataHandlerColumnar.java | 17 +-
.../store/CarbonFactDataHandlerModel.java | 5 +-
.../carbondata/processing/store/TablePage.java | 5 +-
.../store/writer/AbstractFactDataWriter.java | 5 +-
.../writer/v3/CarbonFactDataWriterImplV3.java | 11 +-
.../processing/util/CarbonBadRecordUtil.java | 4 +-
.../util/CarbonDataProcessorUtil.java | 6 +-
.../processing/util/CarbonLoaderUtil.java | 9 +-
.../processing/util/CarbonLoaderUtilTest.java | 4 +-
.../carbondata/sdk/file/AvroCarbonWriter.java | 4 +-
.../carbondata/store/LocalCarbonStore.java | 4 +-
.../carbondata/sdk/file/CarbonReaderTest.java | 7 +-
.../store/worker/SearchRequestHandler.java | 13 +-
.../streaming/CarbonStreamRecordWriter.java | 5 +-
.../streaming/segment/StreamSegment.java | 8 +-
232 files changed, 1017 insertions(+), 1139 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/common/src/main/java/org/apache/carbondata/common/logging/LogService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/logging/LogService.java b/common/src/main/java/org/apache/carbondata/common/logging/LogService.java
index ee02aba..55dade6 100644
--- a/common/src/main/java/org/apache/carbondata/common/logging/LogService.java
+++ b/common/src/main/java/org/apache/carbondata/common/logging/LogService.java
@@ -17,35 +17,89 @@
package org.apache.carbondata.common.logging;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+import org.apache.carbondata.common.logging.impl.AuditLevel;
+import org.apache.carbondata.common.logging.impl.StatisticLevel;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+
/**
- * for Log Services
+ * Log Services, wrapper of org.apache.log4j.Logger
*/
-public interface LogService {
+public class LogService extends Logger {
+
+ private static String hostName;
+ private static String username;
+
+ {
+ try {
+ hostName = InetAddress.getLocalHost().getHostName();
+ } catch (UnknownHostException e) {
+ hostName = "localhost";
+ }
+ try {
+ username = UserGroupInformation.getCurrentUser().getShortUserName();
+ } catch (IOException e) {
+ username = "unknown";
+ }
+ }
+
+ protected LogService(String name) {
+ super(name);
+ }
- void debug(String message);
+ public void debug(String message) {
+ super.debug(message);
+ }
- void info(String message);
+ public void info(String message) {
+ super.info(message);
+ }
- void warn(String message);
+ public void warn(String message) {
+ super.warn(message);
+ }
- void error(String message);
+ public void error(String message) {
+ super.error(message);
+ }
- void error(Throwable throwable);
+ public void error(Throwable throwable) {
+ super.error(throwable);
+ }
- void error(Throwable throwable, String message);
+ public void error(Throwable throwable, String message) {
+ super.error(message, throwable);
+ }
- void audit(String message);
+ public void audit(String message) {
+ String threadid = Thread.currentThread().getId() + "";
+ super.log(AuditLevel.AUDIT,
+ "[" + hostName + "]" + "[" + username + "]" + "[Thread-" + threadid + "]" + message);
+ }
/**
* Below method will be used to log the statistic information
*
* @param message statistic message
*/
- void statistic(String message);
+ public void statistic(String message) {
+ super.log(StatisticLevel.STATISTIC, message);
+ }
- boolean isDebugEnabled();
+ public boolean isDebugEnabled() {
+ return super.isDebugEnabled();
+ }
- boolean isWarnEnabled();
+ public boolean isWarnEnabled() {
+ return super.isEnabledFor(org.apache.log4j.Level.WARN);
+ }
- boolean isInfoEnabled();
+ public boolean isInfoEnabled() {
+ return super.isInfoEnabled();
+ }
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java b/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java
index cf84d2d..abe0b1c 100644
--- a/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java
+++ b/common/src/main/java/org/apache/carbondata/common/logging/LogServiceFactory.java
@@ -17,7 +17,7 @@
package org.apache.carbondata.common.logging;
-import org.apache.carbondata.common.logging.impl.StandardLogService;
+import org.apache.log4j.Logger;
/**
* Log Service factory
@@ -33,7 +33,7 @@ public final class LogServiceFactory {
* @param className provides class name
* @return LogService
*/
- public static LogService getLogService(final String className) {
- return new StandardLogService(className);
+ public static Logger getLogService(final String className) {
+ return Logger.getLogger(className);
}
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/common/src/main/java/org/apache/carbondata/common/logging/impl/Audit.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/logging/impl/Audit.java b/common/src/main/java/org/apache/carbondata/common/logging/impl/Audit.java
new file mode 100644
index 0000000..1c822b9
--- /dev/null
+++ b/common/src/main/java/org/apache/carbondata/common/logging/impl/Audit.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.common.logging.impl;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+
+public class Audit {
+ private static String hostName;
+ private static String username;
+
+ static {
+ try {
+ hostName = InetAddress.getLocalHost().getHostName();
+ } catch (UnknownHostException e) {
+ hostName = "localhost";
+ }
+ try {
+ username = UserGroupInformation.getCurrentUser().getShortUserName();
+ } catch (IOException e) {
+ username = "unknown";
+ }
+ }
+
+ public static void log(Logger logger, String message) {
+ String threadid = String.valueOf(Thread.currentThread().getId());
+ logger.log(AuditLevel.AUDIT,
+ "[" + hostName + "]" + "[" + username + "]" + "[Thread-" + threadid + "]" + message);
+ }
+}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/common/src/main/java/org/apache/carbondata/common/logging/impl/StandardLogService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/logging/impl/StandardLogService.java b/common/src/main/java/org/apache/carbondata/common/logging/impl/StandardLogService.java
deleted file mode 100644
index bec4b5d..0000000
--- a/common/src/main/java/org/apache/carbondata/common/logging/impl/StandardLogService.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.common.logging.impl;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-
-import org.apache.carbondata.common.logging.LogService;
-
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
-import org.apache.log4j.MDC;
-
-/**
- * Default Implementation of the <code>LogService</code>
- */
-public final class StandardLogService implements LogService {
-
- private static final String PARTITION_ID = "[partitionID:";
- private Logger logger;
-
- /**
- * Constructor.
- *
- * @param clazzName for which the Logging is required
- */
- public StandardLogService(String clazzName) {
- logger = Logger.getLogger(clazzName);
- }
-
- public StandardLogService() {
- this("Carbon");
- }
-
- public static String getPartitionID(String tableName) {
- return tableName.substring(tableName.lastIndexOf('_') + 1, tableName.length());
- }
-
- public static void setThreadName(String partitionID, String queryID) {
- StringBuilder b = new StringBuilder(PARTITION_ID);
- b.append(partitionID);
- if (null != queryID) {
- b.append(";queryID:");
- b.append(queryID);
- }
- b.append("]");
- Thread.currentThread().setName(getThreadName() + b.toString());
- }
-
- private static String getThreadName() {
- String name = Thread.currentThread().getName();
- int index = name.indexOf(PARTITION_ID);
- if (index > -1) {
- name = name.substring(0, index);
- } else {
- name = '[' + name + ']';
- }
- return name.trim();
- }
-
- public boolean isDebugEnabled() {
- return logger.isDebugEnabled();
- }
-
- public boolean isWarnEnabled() {
- return logger.isEnabledFor(org.apache.log4j.Level.WARN);
- }
-
- public void debug(String message) {
- if (logger.isDebugEnabled()) {
- logMessage(Level.DEBUG, null, message);
- }
- }
-
- public void error(String message) {
- logMessage(Level.ERROR, null, message);
- }
-
- public void error(Throwable throwable, String message) {
- logMessage(Level.ERROR, throwable, message);
- }
-
- public void error(Throwable throwable) {
- logMessage(Level.ERROR, throwable, "");
- }
-
- public void info(String message) {
- if (logger.isInfoEnabled()) {
- logMessage(Level.INFO, null, message);
- }
- }
-
- /**
- * Utility Method to log the the Message.
- */
- private void logMessage(Level logLevel, Throwable throwable, String message) {
- try {
- //Append the partition id and query id if exist
- StringBuilder buff = new StringBuilder(Thread.currentThread().getName());
- buff.append(" ");
- buff.append(message);
- message = buff.toString();
- if (Level.ERROR.toString().equalsIgnoreCase(logLevel.toString())) {
- logErrorMessage(throwable, message);
- } else if (Level.DEBUG.toString().equalsIgnoreCase(logLevel.toString())) {
- logDebugMessage(throwable, message);
- } else if (Level.INFO.toString().equalsIgnoreCase(logLevel.toString())) {
- logInfoMessage(throwable, message);
- } else if (Level.WARN.toString().equalsIgnoreCase(logLevel.toString())) {
- logWarnMessage(throwable, message);
- } else if (Level.AUDIT.toString().equalsIgnoreCase(logLevel.toString())) {
- audit(message);
- } else if (Level.STATISTICS == logLevel) {
- statistic(message);
- }
-
- } catch (Throwable t) {
- logger.error(t);
- }
- }
-
- private void logErrorMessage(Throwable throwable, String message) {
-
- if (null == throwable) {
- logger.error(message);
- } else {
- logger.error(message, throwable);
- }
- }
-
- private void logInfoMessage(Throwable throwable, String message) {
-
- if (null == throwable) {
- logger.info(message);
- } else {
- logger.info(message, throwable);
- }
- }
-
- private void logDebugMessage(Throwable throwable, String message) {
-
- if (null == throwable) {
- logger.debug(message);
- } else {
- logger.debug(message, throwable);
- }
- }
-
- private void logWarnMessage(Throwable throwable, String message) {
-
- if (null == throwable) {
- logger.warn(message);
- } else {
- logger.warn(message, throwable);
- }
- }
-
- public boolean isInfoEnabled() {
- return logger.isInfoEnabled();
- }
-
- public void warn(String message) {
- if (isWarnEnabled()) {
- logMessage(Level.WARN, null, message);
- }
- }
-
- public void setEventProperties(String propertyName, String propertyValue) {
- MDC.put(propertyName, propertyValue);
- }
-
- /**
- * log audit log
- *
- * @param msg audit log message
- */
- @Override public void audit(String msg) {
- String hostName;
- String username;
- try {
- hostName = InetAddress.getLocalHost().getHostName();
- } catch (UnknownHostException e) {
- hostName = "localhost";
- }
- try {
- username = UserGroupInformation.getCurrentUser().getShortUserName();
- } catch (IOException e) {
- username = "unknown";
- }
- String threadid = Thread.currentThread().getId() + "";
- logger.log(AuditLevel.AUDIT,
- "[" + hostName + "]" + "[" + username + "]" + "[Thread-" + threadid + "]" + msg);
- }
-
- @Override public void statistic(String message) {
- logger.log(StatisticLevel.STATISTIC, message);
- }
-
- /**
- * Specifies the logging level.
- */
- enum Level {
-
- NONE(0),
- DEBUG(1),
- INFO(2),
- STATISTICS(3),
- ERROR(4),
- AUDIT(5),
- WARN(6);
-
- /**
- * Constructor.
- *
- * @param level
- */
- Level(final int level) {
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/common/src/test/java/org/apache/carbondata/common/logging/LogServiceFactoryTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/carbondata/common/logging/LogServiceFactoryTest_UT.java b/common/src/test/java/org/apache/carbondata/common/logging/LogServiceFactoryTest_UT.java
index 5c8b64c..d0e1c0c 100644
--- a/common/src/test/java/org/apache/carbondata/common/logging/LogServiceFactoryTest_UT.java
+++ b/common/src/test/java/org/apache/carbondata/common/logging/LogServiceFactoryTest_UT.java
@@ -17,9 +17,8 @@
package org.apache.carbondata.common.logging;
-import org.apache.carbondata.common.logging.impl.StandardLogService;
-
import junit.framework.TestCase;
+import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -33,8 +32,8 @@ public class LogServiceFactoryTest_UT extends TestCase {
}
@Test public void testGetLogService() {
- LogService logger = LogServiceFactory.getLogService(this.getClass().getName());
- assertTrue(logger instanceof StandardLogService);
+ Logger logger = LogServiceFactory.getLogService(this.getClass().getName());
+ assertTrue(logger instanceof Logger);
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/common/src/test/java/org/apache/carbondata/common/logging/ft/LoggingServiceTest_FT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/carbondata/common/logging/ft/LoggingServiceTest_FT.java b/common/src/test/java/org/apache/carbondata/common/logging/ft/LoggingServiceTest_FT.java
index 1f000ce..867a154 100644
--- a/common/src/test/java/org/apache/carbondata/common/logging/ft/LoggingServiceTest_FT.java
+++ b/common/src/test/java/org/apache/carbondata/common/logging/ft/LoggingServiceTest_FT.java
@@ -24,11 +24,14 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
+import org.apache.carbondata.common.logging.impl.AuditLevel;
import junit.framework.TestCase;
import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
import org.apache.log4j.MDC;
import org.junit.Assert;
import org.junit.Before;
@@ -36,7 +39,7 @@ import org.junit.Test;
public class LoggingServiceTest_FT extends TestCase {
- private static LogService logger =
+ private static Logger logger =
LogServiceFactory.getLogService(LoggingServiceTest_FT.class.getName());
@Before public void setUp() throws Exception {
@@ -56,7 +59,7 @@ public class LoggingServiceTest_FT extends TestCase {
String expectedAuditLine =
"[main] AUDIT [org.apache.carbondata.common.logging.ft.LoggingServiceTest_FT] 127.0.0.1 "
+ "testuser Function Test log- audit message created";
- logger.audit("audit message created");
+ Audit.log(logger, "audit message created");
LogManager.shutdown();
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/common/src/test/java/org/apache/carbondata/common/logging/impl/StandardLogServiceTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/carbondata/common/logging/impl/StandardLogServiceTest_UT.java b/common/src/test/java/org/apache/carbondata/common/logging/impl/StandardLogServiceTest_UT.java
deleted file mode 100644
index faa90eb..0000000
--- a/common/src/test/java/org/apache/carbondata/common/logging/impl/StandardLogServiceTest_UT.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.common.logging.impl;
-
-import junit.framework.TestCase;
-import mockit.Mock;
-import mockit.MockUp;
-import org.apache.log4j.Category;
-import org.apache.log4j.Priority;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class StandardLogServiceTest_UT extends TestCase {
-
- private StandardLogService logService = null;
-
- /**
- * @throws Exception
- */
- @Before public void setUp() throws Exception {
-
- new MockUp<Category>() {
- @SuppressWarnings("unused")
- @Mock public boolean isDebugEnabled() {
- return true;
- }
-
- @SuppressWarnings("unused")
- @Mock public boolean isEnabledFor(Priority level) {
- return true;
- }
-
- @SuppressWarnings("unused")
- @Mock public boolean isInfoEnabled() {
- return true;
- }
- };
-
- logService = new StandardLogService(this.getClass().getName());
- }
-
- /**
- * @throws Exception
- * @Description : tearDown
- */
- @After public void tearDown() throws Exception {
- }
-
- @Test public void testStandardLogService() {
- if (logService != null && logService instanceof StandardLogService) {
- Assert.assertTrue(true);
- } else {
- Assert.assertTrue(false);
- }
- }
-
- @Test public void testIsDebugEnabled() {
- Assert.assertEquals(true, logService.isDebugEnabled());
- }
-
- @Test public void testIsWarnEnabled() {
- Assert.assertEquals(true, logService.isWarnEnabled());
- }
-
- @Test public void testSecureLogEventObjectArray() {
- Assert.assertTrue(true);
- }
-
- @Test public void testAuditLogEventObjectArray() {
- logService.audit("testing");
- Assert.assertTrue(true);
- }
-
- @Test public void testDebugLogEventObjectArray() {
- logService.debug("testing");
- Assert.assertTrue(true);
- }
-
- @Test public void testErrorLogEventObjectArray() {
- logService.error("testing");
- Assert.assertTrue(true);
- }
-
- @Test public void testErrorLogEventThrowableObjectArray() {
- Exception exception = new Exception("test");
- logService.error(exception);
- Assert.assertTrue(true);
- }
-
- @Test public void testErrorLogEventThrowableMessage() {
- Exception exception = new Exception("test");
- logService.error(exception, "additional message");
- Assert.assertTrue(true);
- }
-
- @Test public void testInfoLogEventObjectArray() {
- logService.info("testing");
- Assert.assertTrue(true);
- }
-
- @Test public void testIsInfoEnabled() {
- Assert.assertEquals(true, logService.isInfoEnabled());
- }
-
- @Test public void testDeleteLogs() {
- Assert.assertTrue(true);
- }
-
- @Test public void testFlushLogs() {
- Assert.assertTrue(true);
- }
-
- @Test public void testSetEventProperties() {
- logService.setEventProperties("CLIENT_IP", "127.0.0.1");
- Assert.assertTrue(true);
- }
-
- @Test public void testAuditString() {
- logService.audit("audit message");
- Assert.assertTrue(true);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java b/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
index 292d9eb..d809c25 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/CacheProvider.java
@@ -21,7 +21,6 @@ import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
@@ -31,6 +30,8 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.indexstore.BlockletDataMapIndexStore;
import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.log4j.Logger;
+
/**
* Cache provider class which will create a cache based on given type
*/
@@ -59,7 +60,7 @@ public class CacheProvider {
/**
* instance for CacheProvider LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CacheProvider.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java b/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
index 4a0c36c..87254e3 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
@@ -23,11 +23,12 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.log4j.Logger;
+
/**
* class which manages the lru cache
*/
@@ -39,7 +40,7 @@ public final class CarbonLRUCache {
/**
* Attribute for Carbon LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonLRUCache.class.getName());
/**
* Map that will contain key as table unique name and value as cache Holder
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
index 55a1c05..1607f0f 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
@@ -28,7 +28,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.CacheType;
import org.apache.carbondata.core.cache.CarbonLRUCache;
@@ -37,6 +36,8 @@ import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.ObjectSizeCalculator;
import org.apache.carbondata.core.util.TaskMetricsMap;
+import org.apache.log4j.Logger;
+
/**
* This class implements methods to create dictionary cache which will hold
* dictionary chunks for look up of surrogate keys and values
@@ -47,7 +48,7 @@ public class ForwardDictionaryCache<K extends
/**
* Attribute for Carbon LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ForwardDictionaryCache.class.getName());
private static final Map<DictionaryColumnUniqueIdentifier, Object> DICTIONARY_LOCK_OBJECT =
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java
index 2450f85..038b356 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ManageDictionaryAndBTree.java
@@ -20,7 +20,6 @@ package org.apache.carbondata.core.cache.dictionary;
import java.io.IOException;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
@@ -35,6 +34,8 @@ import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.log4j.Logger;
+
/**
* This class is aimed at managing dictionary files for any new addition and deletion
* and calling of clear cache for BTree and dictionary instances from LRU cache
@@ -44,7 +45,7 @@ public class ManageDictionaryAndBTree {
/**
* Attribute for Carbon LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ManageDictionaryAndBTree.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java
index 05de1d3..deb7339 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java
@@ -27,7 +27,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.CacheType;
import org.apache.carbondata.core.cache.CarbonLRUCache;
@@ -35,6 +34,8 @@ import org.apache.carbondata.core.reader.CarbonDictionaryColumnMetaChunk;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.ObjectSizeCalculator;
+import org.apache.log4j.Logger;
+
/**
* This class implements methods to create dictionary cache which will hold
* dictionary chunks for look up of surrogate keys and values
@@ -46,7 +47,7 @@ public class ReverseDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
/**
* Attribute for Carbon LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ReverseDictionaryCache.class.getName());
private static final long sizeOfEmptyDictChunks =
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/constants/CarbonVersionConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonVersionConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonVersionConstants.java
index 22fbaf2..2382bd8 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonVersionConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonVersionConstants.java
@@ -20,12 +20,13 @@ package org.apache.carbondata.core.constants;
import java.io.InputStream;
import java.util.Properties;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.log4j.Logger;
+
public final class CarbonVersionConstants {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonVersionConstants.class.getName());
/**
* the file name of CarbonData version info
@@ -63,13 +64,13 @@ public final class CarbonVersionConstants {
// read CARBONDATA_VERSION_INFO_FILE into props
props.load(resourceStream);
} catch (Exception e) {
- LOGGER.error(e, "Error loading properties from " + CARBONDATA_VERSION_INFO_FILE);
+ LOGGER.error("Error loading properties from " + CARBONDATA_VERSION_INFO_FILE, e);
} finally {
if (resourceStream != null) {
try {
resourceStream.close();
} catch (Exception e) {
- LOGGER.error(e, "Error closing CarbonData build info resource stream");
+ LOGGER.error("Error closing CarbonData build info resource stream", e);
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index 22db211..75290d3 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -28,7 +28,6 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.exceptions.MetadataProcessException;
import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
import org.apache.carbondata.common.exceptions.sql.NoSuchDataMapException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.dev.DataMapFactory;
@@ -54,6 +53,7 @@ import static org.apache.carbondata.core.metadata.schema.datamap.DataMapClassPro
import static org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider.PREAGGREGATE;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
/**
* It maintains all the DataMaps in it.
@@ -87,7 +87,7 @@ public final class DataMapStoreManager {
private DataMapSchemaStorageProvider provider = new DiskBasedDMSchemaStorageProvider(
CarbonProperties.getInstance().getSystemFolderLocation());
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DataMapStoreManager.class.getName());
private DataMapStoreManager() {
@@ -292,7 +292,7 @@ public final class DataMapStoreManager {
dataMapCatalog.registerSchema(schema);
} catch (Exception e) {
// Ignore the schema
- LOGGER.error(e, "Error while registering schema");
+ LOGGER.error("Error while registering schema", e);
}
}
}
@@ -471,7 +471,7 @@ public final class DataMapStoreManager {
try {
DataMapUtil.executeDataMapJobForClearingDataMaps(carbonTable);
} catch (IOException e) {
- LOGGER.error(e, "clear dataMap job failed");
+ LOGGER.error("clear dataMap job failed", e);
// ignoring the exception
}
}
@@ -540,7 +540,7 @@ public final class DataMapStoreManager {
DataMapUtil.executeDataMapJobForClearingDataMaps(carbonTable);
tableDataMap.clear();
} catch (IOException e) {
- LOGGER.error(e, "clear dataMap job failed");
+ LOGGER.error("clear dataMap job failed", e);
// ignoring the exception
}
tableDataMap.deleteDatamapData();
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datamap/DataMapUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapUtil.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapUtil.java
index 60c5233..138bd62 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapUtil.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.dev.expr.DataMapExprWrapper;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -33,12 +32,13 @@ import org.apache.carbondata.core.statusmanager.SegmentStatusManager;
import org.apache.carbondata.core.util.ObjectSerializationUtil;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
public class DataMapUtil {
private static final String DATA_MAP_DSTR = "mapreduce.input.carboninputformat.datamapdstr";
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DataMapUtil.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datamap/status/DiskBasedDataMapStatusProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/status/DiskBasedDataMapStatusProvider.java b/core/src/main/java/org/apache/carbondata/core/datamap/status/DiskBasedDataMapStatusProvider.java
index d42c98a..07fe93b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/status/DiskBasedDataMapStatusProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/status/DiskBasedDataMapStatusProvider.java
@@ -17,14 +17,20 @@
package org.apache.carbondata.core.datamap.status;
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.fileoperations.AtomicFileOperationFactory;
@@ -39,6 +45,7 @@ import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.CarbonUtil;
import com.google.gson.Gson;
+import org.apache.log4j.Logger;
/**
* It saves/serializes the array of {{@link DataMapStatusDetail}} to disk in json format.
@@ -47,7 +54,7 @@ import com.google.gson.Gson;
*/
public class DiskBasedDataMapStatusProvider implements DataMapStatusStorageProvider {
- private static final LogService LOG =
+ private static final Logger LOG =
LogServiceFactory.getLogService(DiskBasedDataMapStatusProvider.class.getName());
private static final String DATAMAP_STATUS_FILE = "datamapstatus";
@@ -72,7 +79,7 @@ public class DiskBasedDataMapStatusProvider implements DataMapStatusStorageProvi
buffReader = new BufferedReader(inStream);
dataMapStatusDetails = gsonObjectToRead.fromJson(buffReader, DataMapStatusDetail[].class);
} catch (IOException e) {
- LOG.error(e, "Failed to read datamap status");
+ LOG.error("Failed to read datamap status", e);
throw e;
} finally {
CarbonUtil.closeStreams(buffReader, inStream, dataInputStream);
@@ -141,7 +148,7 @@ public class DiskBasedDataMapStatusProvider implements DataMapStatusStorageProvi
} else {
String errorMsg = "Upadating datamapstatus is failed due to another process taken the lock"
+ " for updating it";
- LOG.audit(errorMsg);
+ Audit.log(LOG, errorMsg);
LOG.error(errorMsg);
throw new IOException(errorMsg + " Please try after some time.");
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java
index 21e22b1..cc6341b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java
@@ -25,7 +25,6 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.indexstore.schema.CarbonRowSchema;
@@ -36,6 +35,8 @@ import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.log4j.Logger;
+
/**
* Singleton class which will help in creating the segment properties
*/
@@ -44,7 +45,7 @@ public class SegmentPropertiesAndSchemaHolder {
/**
* Logger
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SegmentPropertiesAndSchemaHolder.class.getName());
/**
* SegmentPropertiesAndSchemaHolder instance
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/blocklet/BlockletEncodedColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/blocklet/BlockletEncodedColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/blocklet/BlockletEncodedColumnPage.java
index da4e211..00a7731 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/blocklet/BlockletEncodedColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/blocklet/BlockletEncodedColumnPage.java
@@ -24,7 +24,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.page.ActualDataBasedFallbackEncoder;
import org.apache.carbondata.core.datastore.page.DecoderBasedFallbackEncoder;
@@ -35,6 +34,8 @@ import org.apache.carbondata.core.localdictionary.generator.LocalDictionaryGener
import org.apache.carbondata.core.memory.MemoryException;
import org.apache.carbondata.format.LocalDictionaryChunk;
+import org.apache.log4j.Logger;
+
/**
* Maintains the list of encoded page of a column in a blocklet
* and encoded dictionary values only if column is encoded using local
@@ -47,7 +48,7 @@ public class BlockletEncodedColumnPage {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(BlockletEncodedColumnPage.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
index 15f912a..c86011c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
@@ -20,15 +20,15 @@ package org.apache.carbondata.core.datastore.compression;
import java.io.IOException;
import java.lang.reflect.Field;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.log4j.Logger;
import org.xerial.snappy.Snappy;
import org.xerial.snappy.SnappyNative;
public class SnappyCompressor implements Compressor {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SnappyCompressor.class.getName());
// snappy estimate max compressed length as 32 + source_len + source_len/6
@@ -61,7 +61,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.rawCompress(unCompInput, unCompInput.length);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -70,7 +70,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.rawCompress(unCompInput, byteSize);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -79,7 +79,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.uncompress(compInput);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -92,7 +92,7 @@ public class SnappyCompressor implements Compressor {
data = new byte[uncompressedLength];
Snappy.uncompress(compInput, offset, length, data, 0);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
return data;
@@ -102,7 +102,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.compress(unCompInput);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -111,7 +111,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.uncompressShortArray(compInput, offset, length);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -120,7 +120,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.compress(unCompInput);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -129,7 +129,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.uncompressIntArray(compInput, offset, length);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -138,7 +138,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.compress(unCompInput);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -147,7 +147,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.uncompressLongArray(compInput, offset, length);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -156,7 +156,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.compress(unCompInput);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -165,7 +165,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.uncompressFloatArray(compInput, offset, length);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -174,7 +174,7 @@ public class SnappyCompressor implements Compressor {
try {
return Snappy.compress(unCompInput);
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@@ -186,7 +186,7 @@ public class SnappyCompressor implements Compressor {
snappyNative.rawUncompress(compInput, offset, length, result, 0);
return result;
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
index c764430..d03aaf8 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
@@ -27,7 +27,6 @@ import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -51,12 +50,13 @@ import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.Lz4Codec;
import org.apache.hadoop.io.compress.SnappyCodec;
+import org.apache.log4j.Logger;
public abstract class AbstractDFSCarbonFile implements CarbonFile {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(AbstractDFSCarbonFile.class.getName());
protected FileStatus fileStatus;
public FileSystem fs;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java
index eabfa48..03fc353 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -29,14 +28,13 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
-
-
+import org.apache.log4j.Logger;
public class AlluxioCarbonFile extends AbstractDFSCarbonFile {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(AlluxioCarbonFile.class.getName());
public AlluxioCarbonFile(String filePath) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
index ee1388f..a0cc462 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.hadoop.conf.Configuration;
@@ -29,12 +28,13 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.log4j.Logger;
public class HDFSCarbonFile extends AbstractDFSCarbonFile {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(HDFSCarbonFile.class.getName());
public HDFSCarbonFile(String filePath) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
index f0794f4..e026b27 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
@@ -36,7 +36,6 @@ import java.util.List;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -53,11 +52,12 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.log4j.Logger;
import org.xerial.snappy.SnappyInputStream;
import org.xerial.snappy.SnappyOutputStream;
public class LocalCarbonFile implements CarbonFile {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LocalCarbonFile.class.getName());
private File file;
@@ -118,8 +118,7 @@ public class LocalCarbonFile implements CarbonFile {
try {
return file.getCanonicalPath();
} catch (IOException e) {
- LOGGER
- .error(e, "Exception occured" + e.getMessage());
+ LOGGER.error("Exception occured" + e.getMessage(), e);
}
return null;
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java
index 8c80065..c23ba69 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java
@@ -20,7 +20,6 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -31,10 +30,11 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
public class S3CarbonFile extends HDFSCarbonFile {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(HDFSCarbonFile.class.getName());
public S3CarbonFile(String filePath) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java
index 6650b9c..b86b139 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java
@@ -20,7 +20,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -28,12 +27,13 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.viewfs.ViewFileSystem;
+import org.apache.log4j.Logger;
public class ViewFSCarbonFile extends AbstractDFSCarbonFile {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ViewFSCarbonFile.class.getName());
public ViewFSCarbonFile(String filePath) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
index e8f6cfb..8bd3c8e 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
@@ -25,7 +25,6 @@ import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.channels.FileChannel;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.FileReader;
@@ -37,12 +36,13 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.log4j.Logger;
public final class FileFactory {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(FileFactory.class.getName());
private static Configuration configuration = null;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java
index ad19e27..3da154a 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java
@@ -20,7 +20,6 @@ package org.apache.carbondata.core.datastore.page;
import java.io.IOException;
import java.math.BigDecimal;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.keygenerator.KeyGenException;
@@ -30,6 +29,8 @@ import org.apache.carbondata.core.localdictionary.PageLevelDictionary;
import org.apache.carbondata.core.localdictionary.exception.DictionaryThresholdReachedException;
import org.apache.carbondata.core.localdictionary.generator.LocalDictionaryGenerator;
+import org.apache.log4j.Logger;
+
/**
* Column page implementation for Local dictionary generated columns
* Its a decorator over two column page
@@ -41,7 +42,7 @@ public class LocalDictColumnPage extends ColumnPage {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LocalDictColumnPage.class.getName());
/**
@@ -132,8 +133,8 @@ public class LocalDictColumnPage extends ColumnPage {
encodedDataColumnPage.freeMemory();
encodedDataColumnPage = null;
} catch (KeyGenException e) {
- LOGGER.error(e, "Unable to generate key for: " + actualDataColumnPage
- .getColumnSpec().getFieldName());
+ LOGGER.error("Unable to generate key for: " + actualDataColumnPage
+ .getColumnSpec().getFieldName(), e);
throw new RuntimeException(e);
}
} else {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
index 44e7192..f38aef2 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
@@ -24,7 +24,6 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.ColumnType;
import org.apache.carbondata.core.datastore.TableSpec;
@@ -49,12 +48,14 @@ import org.apache.carbondata.format.PresenceMeta;
import static org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory.selectCodecByAlgorithmForFloating;
import static org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingFactory.selectCodecByAlgorithmForIntegral;
+import org.apache.log4j.Logger;
+
public abstract class ColumnPageEncoder {
/**
* logger
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ColumnPageEncoder.class.getName());
protected abstract byte[] encodeData(ColumnPage input) throws MemoryException, IOException;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClient.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClient.java b/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClient.java
index cf25ee1..2e58255 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClient.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClient.java
@@ -18,8 +18,8 @@ package org.apache.carbondata.core.dictionary.client;
import java.net.InetSocketAddress;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
import io.netty.bootstrap.Bootstrap;
@@ -29,13 +29,14 @@ import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
+import org.apache.log4j.Logger;
/**
* Dictionary client to connect to Dictionary server and generate dictionary values
*/
public class NonSecureDictionaryClient implements DictionaryClient {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(NonSecureDictionaryClient.class.getName());
private NonSecureDictionaryClientHandler nonSecureDictionaryClientHandler =
@@ -51,7 +52,7 @@ public class NonSecureDictionaryClient implements DictionaryClient {
*/
@Override public void startClient(String secretKey, String address, int port,
boolean encryptSecureServer) {
- LOGGER.audit("Starting client on " + address + " " + port);
+ Audit.log(LOGGER, "Starting client on " + address + " " + port);
long start = System.currentTimeMillis();
// Create an Event with 1 thread.
workerGroup = new NioEventLoopGroup(1);
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClientHandler.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClientHandler.java b/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClientHandler.java
index 3a76d84..457441f 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClientHandler.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/client/NonSecureDictionaryClientHandler.java
@@ -20,8 +20,8 @@ import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
import io.netty.buffer.ByteBuf;
@@ -29,13 +29,14 @@ import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
+import org.apache.log4j.Logger;
/**
* Client handler to get data.
*/
public class NonSecureDictionaryClientHandler extends ChannelInboundHandlerAdapter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(NonSecureDictionaryClientHandler.class.getName());
private final BlockingQueue<DictionaryMessage> responseMsgQueue = new LinkedBlockingQueue<>();
@@ -48,7 +49,7 @@ public class NonSecureDictionaryClientHandler extends ChannelInboundHandlerAdapt
public void channelActive(ChannelHandlerContext ctx) throws Exception {
this.ctx = ctx;
channelFutureListener = new DictionaryChannelFutureListener(ctx);
- LOGGER.audit("Connected client " + ctx);
+ Audit.log(LOGGER, "Connected client " + ctx);
super.channelActive(ctx);
}
@@ -68,7 +69,7 @@ public class NonSecureDictionaryClientHandler extends ChannelInboundHandlerAdapt
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
- LOGGER.error(cause, "exceptionCaught");
+ LOGGER.error("exceptionCaught", cause);
ctx.close();
}
@@ -85,7 +86,7 @@ public class NonSecureDictionaryClientHandler extends ChannelInboundHandlerAdapt
key.writeData(buffer);
ctx.writeAndFlush(buffer).addListener(channelFutureListener);
} catch (Exception e) {
- LOGGER.error(e, "Error while send request to server ");
+ LOGGER.error("Error while send request to server ", e);
ctx.close();
}
try {
@@ -118,7 +119,7 @@ public class NonSecureDictionaryClientHandler extends ChannelInboundHandlerAdapt
@Override
public void operationComplete(ChannelFuture future) throws Exception {
if (!future.isSuccess()) {
- LOGGER.error(future.cause(), "Error while sending request to Dictionary Server");
+ LOGGER.error("Error while sending request to Dictionary Server", future.cause());
ctx.close();
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
index 8a69b80..bf0f094 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
@@ -22,8 +22,8 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
import org.apache.carbondata.core.cache.CacheType;
@@ -46,6 +46,7 @@ import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWrit
import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfo;
import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfoPreparator;
+import org.apache.log4j.Logger;
/**
* This generator does not maintain the whole cache of dictionary. It just maintains the cache only
@@ -55,7 +56,7 @@ import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfoPrepa
public class IncrementalColumnDictionaryGenerator implements BiDictionary<Integer, String>,
DictionaryGenerator<Integer, String>, DictionaryWriter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(IncrementalColumnDictionaryGenerator.class.getName());
private final Object lock = new Object();
@@ -147,7 +148,7 @@ public class IncrementalColumnDictionaryGenerator implements BiDictionary<Intege
long sortIndexWriteTime = System.currentTimeMillis() - t3;
// update Meta Data
updateMetaData(dictionaryWriter);
- LOGGER.audit("\n columnName: " + dimension.getColName() +
+ Audit.log(LOGGER, "\n columnName: " + dimension.getColName() +
"\n columnId: " + dimension.getColumnId() +
"\n new distinct values count: " + distinctValues.size() +
"\n create dictionary cache: " + dictCacheTime +
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
index 3dbe5b0..33a91d8 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
@@ -23,7 +23,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.devapi.BiDictionary;
import org.apache.carbondata.core.devapi.DictionaryGenerationException;
@@ -33,13 +32,15 @@ import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.log4j.Logger;
+
/**
* Dictionary generation for table.
*/
public class TableDictionaryGenerator
implements DictionaryGenerator<Integer, DictionaryMessage>, DictionaryWriter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(TableDictionaryGenerator.class.getName());
private CarbonTable carbonTable;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServer.java b/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServer.java
index dc2d211..5d55416 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServer.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServer.java
@@ -18,7 +18,6 @@ package org.apache.carbondata.core.dictionary.server;
import java.net.InetSocketAddress;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
@@ -36,6 +35,7 @@ import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
+import org.apache.log4j.Logger;
/**
* Dictionary Server to generate dictionary keys.
@@ -43,7 +43,7 @@ import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
public class NonSecureDictionaryServer extends AbstractDictionaryServer
implements DictionaryServer {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(NonSecureDictionaryServer.class.getName());
private NonSecureDictionaryServerHandler nonSecureDictionaryServerHandler;
@@ -120,7 +120,7 @@ public class NonSecureDictionaryServer extends AbstractDictionaryServer
this.host = hostToBind;
break;
} catch (Exception e) {
- LOGGER.error(e, "Dictionary Server Failed to bind to port:" + newPort);
+ LOGGER.error("Dictionary Server Failed to bind to port:" + newPort, e);
if (i == 9) {
throw new RuntimeException("Dictionary Server Could not bind to any port");
}
[3/6] carbondata git commit: [CARBONDATA-3024] Refactor to use log4j
Logger directly
Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala
index 57b2e44..3f0eb71 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala
@@ -28,7 +28,9 @@ import org.apache.spark.{Partition, SerializableWritable, SparkContext, TaskCont
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.converter.SparkDataTypeConverterImpl
import org.apache.carbondata.core.datamap.Segment
import org.apache.carbondata.core.datastore.block.SegmentProperties
@@ -335,7 +337,7 @@ object StreamHandoffRDD {
} catch {
case ex: Exception =>
loadStatus = SegmentStatus.LOAD_FAILURE
- LOGGER.error(ex, s"Handoff failed on streaming segment $handoffSegmenId")
+ LOGGER.error(s"Handoff failed on streaming segment $handoffSegmenId", ex)
errorMessage = errorMessage + ": " + ex.getCause.getMessage
LOGGER.error(errorMessage)
}
@@ -345,7 +347,7 @@ object StreamHandoffRDD {
LOGGER.info("********starting clean up**********")
CarbonLoaderUtil.deleteSegment(carbonLoadModel, carbonLoadModel.getSegmentId.toInt)
LOGGER.info("********clean up done**********")
- LOGGER.audit(s"Handoff is failed for " +
+ Audit.log(LOGGER, s"Handoff is failed for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
LOGGER.error("Cannot write load metadata file as handoff failed")
throw new Exception(errorMessage)
@@ -367,7 +369,7 @@ object StreamHandoffRDD {
.fireEvent(loadTablePostStatusUpdateEvent, operationContext)
if (!done) {
val errorMessage = "Handoff failed due to failure in table status updation."
- LOGGER.audit("Handoff is failed for " +
+ Audit.log(LOGGER, "Handoff is failed for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
LOGGER.error("Handoff failed due to failure in table status updation.")
throw new Exception(errorMessage)
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
index 2cc2a5b..1e8d148 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
@@ -27,6 +27,7 @@ import scala.collection.mutable
import scala.util.Try
import com.univocity.parsers.common.TextParsingException
+import org.apache.log4j.Logger
import org.apache.spark.SparkException
import org.apache.spark.sql._
import org.apache.spark.sql.carbondata.execution.datasources.CarbonSparkDataSourceUtil
@@ -363,7 +364,7 @@ object CarbonScalaUtil {
/**
* Retrieve error message from exception
*/
- def retrieveAndLogErrorMsg(ex: Throwable, logger: LogService): (String, String) = {
+ def retrieveAndLogErrorMsg(ex: Throwable, logger: Logger): (String, String) = {
var errorMessage = "DataLoad failure"
var executorMessage = ""
if (ex != null) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala
index 67c4c9b..704382f 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/GlobalDictionaryUtil.scala
@@ -696,17 +696,17 @@ object GlobalDictionaryUtil {
} catch {
case ex: Exception =>
if (ex.getCause != null && ex.getCause.isInstanceOf[NoRetryException]) {
- LOGGER.error(ex.getCause, "generate global dictionary failed")
+ LOGGER.error("generate global dictionary failed", ex.getCause)
throw new Exception("generate global dictionary failed, " +
ex.getCause.getMessage)
}
ex match {
case spx: SparkException =>
- LOGGER.error(spx, "generate global dictionary failed")
+ LOGGER.error("generate global dictionary failed", spx)
throw new Exception("generate global dictionary failed, " +
trimErrorMessage(spx.getMessage))
case _ =>
- LOGGER.error(ex, "generate global dictionary failed")
+ LOGGER.error("generate global dictionary failed", ex)
throw ex
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index 5e0fe8b..e1dd0af 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -29,7 +29,9 @@ import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.util.CarbonException
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.Segment
import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -280,7 +282,7 @@ class AlterTableColumnSchemaGenerator(
.foreach(f => if (f._2.size > 1) {
val name = f._1
LOGGER.error(s"Duplicate column found with name: $name")
- LOGGER.audit(
+ Audit.log(LOGGER,
s"Validation failed for Create/Alter Table Operation " +
s"for ${ dbName }.${ alterTableModel.tableName }. " +
s"Duplicate column found with name: $name")
@@ -289,7 +291,7 @@ class AlterTableColumnSchemaGenerator(
if (newCols.exists(_.getDataType.isComplexType)) {
LOGGER.error(s"Complex column cannot be added")
- LOGGER.audit(
+ Audit.log(LOGGER,
s"Validation failed for Create/Alter Table Operation " +
s"for ${ dbName }.${ alterTableModel.tableName }. " +
s"Complex column cannot be added")
@@ -780,7 +782,7 @@ class TableNewProcessor(cm: TableModel) {
if (f._2.size > 1) {
val name = f._1
LOGGER.error(s"Duplicate column found with name: $name")
- LOGGER.audit(
+ Audit.log(LOGGER,
s"Validation failed for Create/Alter Table Operation " +
s"Duplicate column found with name: $name")
CarbonException.analysisException(s"Duplicate dimensions found with name: $name")
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala
index 6d93b34..2fdbc86 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala
@@ -283,7 +283,7 @@ object CarbonAppendableStreamSink {
case t: Throwable =>
val segmentDir = CarbonTablePath.getSegmentPath(carbonTable.getTablePath, segmentId)
StreamSegment.recoverSegmentIfRequired(segmentDir)
- LOGGER.error(t, s"Aborting job ${ job.getJobID }.")
+ LOGGER.error(s"Aborting job ${ job.getJobID }.", t)
committer.abortJob(job)
throw new CarbonStreamException("Job failed to write data file", t)
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
index e5552db..87106e0 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
@@ -24,7 +24,9 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.io.IOUtils
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.locks.HdfsFileLock
import org.apache.carbondata.core.util.CarbonUtil
@@ -48,7 +50,7 @@ object ResourceRegisterAndCopier {
if (!file.exists()) {
sys.error(s"""Provided path $hdfsPath does not exist""")
}
- LOGGER.audit("Try downloading resource data")
+ Audit.log(LOGGER, "Try downloading resource data")
val lock = new HdfsFileLock(hdfsPath, "/resource.lock")
var bool = false
try {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java
----------------------------------------------------------------------
diff --git a/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java b/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java
index 6acf31f..5121027 100644
--- a/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java
+++ b/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java
@@ -23,7 +23,6 @@ import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.CarbonDictionary;
-import org.apache.parquet.column.Encoding;
import org.apache.spark.sql.CarbonVectorProxy;
import org.apache.spark.sql.carbondata.execution.datasources.CarbonSparkDataSourceUtil;
import org.apache.spark.sql.types.Decimal;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java b/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
index 67ea497..779c62f 100644
--- a/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
+++ b/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
@@ -23,7 +23,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
+import org.apache.log4j.Logger;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
@@ -65,7 +65,7 @@ import org.apache.spark.sql.types.StructType;
*/
public class VectorizedCarbonRecordReader extends AbstractRecordReader<Object> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(VectorizedCarbonRecordReader.class.getName());
private int batchIdx = 0;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala b/integration/spark2/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala
index e3fec10..3f486d0 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala
@@ -403,7 +403,7 @@ class IndexDataMapRebuildRDD[K, V](
reader.close()
} catch {
case ex: Throwable =>
- LOGGER.error(ex, "Failed to close reader")
+ LOGGER.error("Failed to close reader", ex)
}
}
@@ -412,7 +412,7 @@ class IndexDataMapRebuildRDD[K, V](
refresher.close()
} catch {
case ex: Throwable =>
- LOGGER.error(ex, "Failed to close index writer")
+ LOGGER.error("Failed to close index writer", ex)
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/AggregateDataMapCompactor.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/AggregateDataMapCompactor.scala b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/AggregateDataMapCompactor.scala
index 82bae8e..a0bdd64 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/AggregateDataMapCompactor.scala
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/AggregateDataMapCompactor.scala
@@ -25,6 +25,8 @@ import org.apache.spark.sql.execution.command.CompactionModel
import org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand
import org.apache.spark.sql.execution.command.preaaggregate.PreAggregateUtil
+import org.apache.carbondata.api.CarbonStore.LOGGER
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.Segment
import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -130,8 +132,8 @@ class AggregateDataMapCompactor(carbonLoadModel: CarbonLoadModel,
carbonLoadModel.getTableName)
LOGGER
.info(s"Compaction request for datamap ${ carbonTable.getTableUniqueName } is successful")
- LOGGER
- .audit(s"Compaction request for datamap ${carbonTable.getTableUniqueName} is successful")
+ Audit.log(LOGGER,
+ s"Compaction request for datamap ${carbonTable.getTableUniqueName} is successful")
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index 0ec3bc6..4f42139 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -45,8 +45,10 @@ import org.apache.spark.sql.hive.DistributionUtil
import org.apache.spark.sql.optimizer.CarbonFilters
import org.apache.spark.sql.util.{CarbonException, SparkSQLUtil}
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.constants.LoggerAction
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.{DataMapStoreManager, Segment}
import org.apache.carbondata.core.datamap.status.DataMapStatusManager
@@ -130,7 +132,7 @@ object CarbonDataRDDFactory {
}
}
} else {
- LOGGER.audit("Not able to acquire the system level compaction lock for table " +
+ Audit.log(LOGGER, "Not able to acquire the system level compaction lock for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
LOGGER.error("Not able to acquire the compaction lock for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
@@ -307,7 +309,7 @@ object CarbonDataRDDFactory {
dataFrame: Option[DataFrame] = None,
updateModel: Option[UpdateTableModel] = None,
operationContext: OperationContext): Unit = {
- LOGGER.audit(s"Data load request has been received for table" +
+ Audit.log(LOGGER, s"Data load request has been received for table" +
s" ${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
// Check if any load need to be deleted before loading new data
val carbonTable = carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable
@@ -449,7 +451,7 @@ object CarbonDataRDDFactory {
// this means that the update doesnt have any records to update so no need to do table
// status file updation.
if (resultSize == 0) {
- LOGGER.audit("Data update is successful with 0 rows updation for " +
+ Audit.log(LOGGER, "Data update is successful with 0 rows updation for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
return
}
@@ -460,11 +462,11 @@ object CarbonDataRDDFactory {
true,
new util.ArrayList[Segment](0),
new util.ArrayList[Segment](segmentFiles), "")) {
- LOGGER.audit("Data update is successful for " +
+ Audit.log(LOGGER, "Data update is successful for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
} else {
val errorMessage = "Data update failed due to failure in table status updation."
- LOGGER.audit("Data update is failed for " +
+ Audit.log(LOGGER, "Data update is failed for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
LOGGER.error("Data update failed due to failure in table status updation.")
updateModel.get.executorErrors.errorMsg = errorMessage
@@ -486,7 +488,7 @@ object CarbonDataRDDFactory {
clearDataMapFiles(carbonTable, carbonLoadModel.getSegmentId)
}
LOGGER.info("********clean up done**********")
- LOGGER.audit(s"Data load is failed for " +
+ Audit.log(LOGGER, s"Data load is failed for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
LOGGER.warn("Cannot write load metadata file as data load failed")
throw new Exception(errorMessage)
@@ -505,7 +507,7 @@ object CarbonDataRDDFactory {
clearDataMapFiles(carbonTable, carbonLoadModel.getSegmentId)
}
LOGGER.info("********clean up done**********")
- LOGGER.audit(s"Data load is failed for " +
+ Audit.log(LOGGER, s"Data load is failed for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
throw new Exception(status(0)._2._2.errorMsg)
}
@@ -557,7 +559,7 @@ object CarbonDataRDDFactory {
true
} catch {
case ex: Exception =>
- LOGGER.error(ex, "Problem while committing data maps")
+ LOGGER.error("Problem while committing data maps", ex)
false
}
if (!done || !commitComplete) {
@@ -573,16 +575,16 @@ object CarbonDataRDDFactory {
clearDataMapFiles(carbonTable, carbonLoadModel.getSegmentId)
}
LOGGER.info("********clean up done**********")
- LOGGER.audit("Data load is failed for " +
+ Audit.log(LOGGER, "Data load is failed for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
LOGGER.error("Data load failed due to failure in table status updation.")
throw new Exception("Data load failed due to failure in table status updation.")
}
if (SegmentStatus.LOAD_PARTIAL_SUCCESS == loadStatus) {
- LOGGER.audit("Data load is partially successful for " +
+ Audit.log(LOGGER, "Data load is partially successful for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
} else {
- LOGGER.audit("Data load is successful for " +
+ Audit.log(LOGGER, "Data load is successful for " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
}
try {
@@ -843,7 +845,7 @@ object CarbonDataRDDFactory {
s" ${ CarbonDataMergerUtil.checkIfAutoLoadMergingRequired(carbonTable) }")
if (!carbonTable.isChildDataMap &&
CarbonDataMergerUtil.checkIfAutoLoadMergingRequired(carbonTable)) {
- LOGGER.audit(s"Compaction request received for table " +
+ Audit.log(LOGGER, s"Compaction request received for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
val compactionSize = 0
val isCompactionTriggerByDDl = false
@@ -903,7 +905,7 @@ object CarbonDataRDDFactory {
throw e
}
} else {
- LOGGER.audit("Not able to acquire the compaction lock for table " +
+ Audit.log(LOGGER, "Not able to acquire the compaction lock for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName}")
LOGGER.error("Not able to acquire the compaction lock for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName}")
@@ -946,7 +948,7 @@ object CarbonDataRDDFactory {
if (!done) {
val errorMessage = s"Dataload failed due to failure in table status updation for" +
s" ${carbonLoadModel.getTableName}"
- LOGGER.audit("Data load is failed for " +
+ Audit.log(LOGGER, "Data load is failed for " +
s"${carbonLoadModel.getDatabaseName}.${carbonLoadModel.getTableName}")
LOGGER.error("Dataload failed due to failure in table status updation.")
throw new Exception(errorMessage)
@@ -1087,7 +1089,7 @@ object CarbonDataRDDFactory {
).collect()
} catch {
case ex: Exception =>
- LOGGER.error(ex, "load data failed for partition table")
+ LOGGER.error("load data failed for partition table", ex)
throw ex
}
}
@@ -1120,7 +1122,7 @@ object CarbonDataRDDFactory {
).collect()
} catch {
case ex: Exception =>
- LOGGER.error(ex, "load data frame failed")
+ LOGGER.error("load data frame failed", ex)
throw ex
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala
index c505bbc..756d30c 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala
@@ -27,6 +27,8 @@ import scala.collection.mutable
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.execution.command.{CarbonMergerMapping, CompactionCallableModel, CompactionModel}
+import org.apache.carbondata.api.CarbonStore.LOGGER
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.{DataMapStoreManager, Segment}
import org.apache.carbondata.core.metadata.SegmentFileStore
@@ -68,7 +70,7 @@ class CarbonTableCompactor(carbonLoadModel: CarbonLoadModel,
scanSegmentsAndSubmitJob(loadsToMerge, compactedSegments)
} catch {
case e: Exception =>
- LOGGER.error(e, s"Exception in compaction thread ${ e.getMessage }")
+ LOGGER.error(s"Exception in compaction thread ${ e.getMessage }", e)
throw e
}
@@ -302,7 +304,8 @@ class CarbonTableCompactor(carbonLoadModel: CarbonLoadModel,
// true because compaction for all datamaps will be finished at a time to the maximum level
// possible (level 1, 2 etc). so we need to check for either condition
if (!statusFileUpdation || !commitComplete) {
- LOGGER.audit(s"Compaction request failed for table ${ carbonLoadModel.getDatabaseName }." +
+ Audit.log(LOGGER,
+ s"Compaction request failed for table ${ carbonLoadModel.getDatabaseName }." +
s"${ carbonLoadModel.getTableName }")
LOGGER.error(s"Compaction request failed for table ${ carbonLoadModel.getDatabaseName }." +
s"${ carbonLoadModel.getTableName }")
@@ -310,13 +313,14 @@ class CarbonTableCompactor(carbonLoadModel: CarbonLoadModel,
s" ${ carbonLoadModel.getDatabaseName }." +
s"${ carbonLoadModel.getTableName }")
} else {
- LOGGER.audit(s"Compaction request completed for table " +
+ Audit.log(LOGGER,
+ s"Compaction request completed for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
LOGGER.info(s"Compaction request completed for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
}
} else {
- LOGGER.audit(s"Compaction request failed for table " +
+ Audit.log(LOGGER, s"Compaction request failed for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }"
)
LOGGER.error(s"Compaction request failed for table " +
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
index 320cd78..7edc50f 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
@@ -37,7 +37,7 @@ abstract class Compactor(carbonLoadModel: CarbonLoadModel,
sqlContext: SQLContext,
storeLocation: String) {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
def executeCompaction(): Unit
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala b/integration/spark2/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala
index 23323d4..1b9fb44 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala
@@ -26,9 +26,11 @@ import org.apache.spark.sql.carbondata.execution.datasources.CarbonSparkDataSour
import org.apache.spark.sql.streaming.StreamingQuery
import org.apache.spark.sql.types.{StructField, StructType}
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.exceptions.NoSuchStreamException
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.locks.{CarbonLockFactory, LockUsage}
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.processing.loading.csvinput.CSVInputFormat
@@ -158,7 +160,7 @@ object StreamJobManager {
StreamJobDesc(job, streamName, sourceTable.getDatabaseName, sourceTable.getTableName,
sinkTable.getDatabaseName, sinkTable.getTableName, query, thread))
- LOGGER.audit(s"STREAM $streamName started with job id '${job.id.toString}', " +
+ Audit.log(LOGGER, s"STREAM $streamName started with job id '${job.id.toString}', " +
s"from ${sourceTable.getDatabaseName}.${sourceTable.getTableName} " +
s"to ${sinkTable.getDatabaseName}.${sinkTable.getTableName}")
job.id.toString
@@ -179,7 +181,8 @@ object StreamJobManager {
jobDesc.streamingQuery.stop()
jobDesc.thread.interrupt()
jobs.remove(streamName)
- LOGGER.audit(s"STREAM $streamName stopped, job id '${jobDesc.streamingQuery.id.toString}', " +
+ Audit.log(LOGGER,
+ s"STREAM $streamName stopped, job id '${jobDesc.streamingQuery.id.toString}', " +
s"from ${jobDesc.sourceDb}.${jobDesc.sourceTable} " +
s"to ${jobDesc.sinkDb}.${jobDesc.sinkTable}")
} else {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
index 838b28d..7eb6e88 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
@@ -170,7 +170,7 @@ class CarbonSession(@transient val sc: SparkContext,
*/
private def trySearchMode(qe: QueryExecution, sse: SQLStart): DataFrame = {
val analyzed = qe.analyzed
- val LOG: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOG = LogServiceFactory.getLogService(this.getClass.getName)
analyzed match {
case _@Project(columns, _@Filter(expr, s: SubqueryAlias))
if s.child.isInstanceOf[LogicalRelation] &&
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala
index 1a76ed7..2d4fe84 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala
@@ -23,7 +23,8 @@ import scala.collection.mutable.ListBuffer
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.datamap.DataMapStoreManager
import org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
@@ -31,12 +32,12 @@ import org.apache.carbondata.datamap.CarbonMergeBloomIndexFilesRDD
import org.apache.carbondata.events._
class MergeBloomIndexEventListener extends OperationEventListener with Logging {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
override def onEvent(event: Event, operationContext: OperationContext): Unit = {
event match {
case datamapPostEvent: BuildDataMapPostExecutionEvent =>
- LOGGER.audit("Load post status event-listener called for merge bloom index")
+ Audit.log(LOGGER, "Load post status event-listener called for merge bloom index")
val carbonTableIdentifier = datamapPostEvent.identifier
val carbonTable = DataMapStoreManager.getInstance().getCarbonTable(carbonTableIdentifier)
val tableDataMaps = DataMapStoreManager.getInstance().getAllDataMap(carbonTable)
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala
index a0c19e9..639a0e3 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala
@@ -23,11 +23,11 @@ import scala.collection.JavaConverters._
import scala.collection.mutable
import org.apache.spark.internal.Logging
-import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.util.CarbonException
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.Segment
import org.apache.carbondata.core.locks.{CarbonLockFactory, LockUsage}
@@ -40,12 +40,12 @@ import org.apache.carbondata.processing.merger.CarbonDataMergerUtil
import org.apache.carbondata.spark.util.CommonUtil
class MergeIndexEventListener extends OperationEventListener with Logging {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
override def onEvent(event: Event, operationContext: OperationContext): Unit = {
event match {
case preStatusUpdateEvent: LoadTablePostExecutionEvent =>
- LOGGER.audit("Load post status event-listener called for merge index")
+ Audit.log(LOGGER, "Load post status event-listener called for merge index")
val loadModel = preStatusUpdateEvent.getCarbonLoadModel
val carbonTable = loadModel.getCarbonDataLoadSchema.getCarbonTable
val compactedSegments = loadModel.getMergedSegmentIds
@@ -71,7 +71,7 @@ class MergeIndexEventListener extends OperationEventListener with Logging {
}
}
case alterTableCompactionPostEvent: AlterTableCompactionPostEvent =>
- LOGGER.audit("Merge index for compaction called")
+ Audit.log(LOGGER, "Merge index for compaction called")
val carbonTable = alterTableCompactionPostEvent.carbonTable
val mergedLoads = alterTableCompactionPostEvent.compactedLoads
val sparkSession = alterTableCompactionPostEvent.sparkSession
@@ -79,11 +79,10 @@ class MergeIndexEventListener extends OperationEventListener with Logging {
mergeIndexFilesForCompactedSegments(sparkSession, carbonTable, mergedLoads)
}
case alterTableMergeIndexEvent: AlterTableMergeIndexEvent =>
- val alterTableModel = alterTableMergeIndexEvent.alterTableModel
val carbonMainTable = alterTableMergeIndexEvent.carbonTable
val sparkSession = alterTableMergeIndexEvent.sparkSession
if (!carbonMainTable.isStreamingSink) {
- LOGGER.audit(s"Compaction request received for table " +
+ Audit.log(LOGGER, s"Compaction request received for table " +
s"${ carbonMainTable.getDatabaseName }.${ carbonMainTable.getTableName }")
LOGGER.info(s"Merge Index request received for table " +
s"${ carbonMainTable.getDatabaseName }.${ carbonMainTable.getTableName }")
@@ -129,7 +128,7 @@ class MergeIndexEventListener extends OperationEventListener with Logging {
clearBlockDataMapCache(carbonMainTable, validSegmentIds)
val requestMessage = "Compaction request completed for table " +
s"${ carbonMainTable.getDatabaseName }.${ carbonMainTable.getTableName }"
- LOGGER.audit(requestMessage)
+ Audit.log(LOGGER, requestMessage)
LOGGER.info(requestMessage)
} else {
val lockMessage = "Not able to acquire the compaction lock for table " +
@@ -138,7 +137,7 @@ class MergeIndexEventListener extends OperationEventListener with Logging {
.getTableName
}"
- LOGGER.audit(lockMessage)
+ Audit.log(LOGGER, lockMessage)
LOGGER.error(lockMessage)
CarbonException.analysisException(
"Table is already locked for compaction. Please try after some time.")
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
index 66f9e47..081482c 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
@@ -22,8 +22,10 @@ import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command._
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.exceptions.sql.{MalformedCarbonCommandException, MalformedDataMapCommandException}
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.datamap.{DataMapProvider, DataMapStoreManager}
import org.apache.carbondata.core.datamap.status.DataMapStatusManager
import org.apache.carbondata.core.metadata.ColumnarFormatVersion
@@ -151,7 +153,7 @@ case class CarbonCreateDataMapCommand(
systemFolderLocation, tableIdentifier, dmProviderName)
OperationListenerBus.getInstance().fireEvent(createDataMapPostExecutionEvent,
operationContext)
- LOGGER.audit(s"DataMap $dataMapName successfully added")
+ Audit.log(LOGGER, s"DataMap $dataMapName successfully added")
Seq.empty
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
index 4607de0..67e2dee 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
@@ -28,7 +28,8 @@ import org.apache.spark.sql.execution.command.preaaggregate.PreAggregateUtil
import org.apache.spark.sql.execution.command.table.CarbonDropTableCommand
import org.apache.carbondata.common.exceptions.sql.NoSuchDataMapException
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.datamap.{DataMapProvider, DataMapStoreManager}
import org.apache.carbondata.core.datamap.status.DataMapStatusManager
import org.apache.carbondata.core.locks.{CarbonLockUtil, ICarbonLock, LockUsage}
@@ -52,7 +53,7 @@ case class CarbonDropDataMapCommand(
forceDrop: Boolean = false)
extends AtomicRunnableCommand {
- private val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
private var dataMapProvider: DataMapProvider = _
var mainTable: CarbonTable = _
var dataMapSchema: DataMapSchema = _
@@ -111,7 +112,7 @@ case class CarbonDropDataMapCommand(
locksToBeAcquired foreach {
lock => carbonLocks += CarbonLockUtil.getLockObject(tableIdentifier, lock)
}
- LOGGER.audit(s"Deleting datamap [$dataMapName] under table [$tableName]")
+ Audit.log(LOGGER, s"Deleting datamap [$dataMapName] under table [$tableName]")
// drop index,mv datamap on the main table.
if (mainTable != null &&
@@ -172,7 +173,7 @@ case class CarbonDropDataMapCommand(
case e: NoSuchDataMapException =>
throw e
case ex: Exception =>
- LOGGER.error(ex, s"Dropping datamap $dataMapName failed")
+ LOGGER.error(s"Dropping datamap $dataMapName failed", ex)
throwMetadataException(dbName, tableName,
s"Dropping datamap $dataMapName failed: ${ ex.getMessage }")
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
index b699ec1..8e338db 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
@@ -25,14 +25,15 @@ import scala.collection.JavaConverters._
import org.apache.spark.sql.{CarbonEnv, Row, SparkSession, SQLContext}
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.execution.command.{AlterTableModel, AtomicRunnableCommand, CarbonMergerMapping, CompactionModel}
+import org.apache.spark.sql.execution.command.{AlterTableModel, AtomicRunnableCommand, CompactionModel}
import org.apache.spark.sql.hive.{CarbonRelation, CarbonSessionCatalog}
import org.apache.spark.sql.optimizer.CarbonFilters
import org.apache.spark.sql.util.CarbonException
import org.apache.spark.util.AlterTableUtil
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.compression.CompressorFactory
import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -63,7 +64,7 @@ case class CarbonAlterTableCompactionCommand(
var table: CarbonTable = _
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val tableName = alterTableModel.tableName.toLowerCase
val dbName = alterTableModel.dbName.getOrElse(sparkSession.catalog.currentDatabase)
table = if (tableInfoOp.isDefined) {
@@ -204,7 +205,7 @@ case class CarbonAlterTableCompactionCommand(
storeLocation: String,
compactedSegments: java.util.List[String],
operationContext: OperationContext): Unit = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
val compactionType = CompactionType.valueOf(alterTableModel.compactionType.toUpperCase)
val compactionSize = CarbonDataMergerUtil.getCompactionSize(compactionType, carbonLoadModel)
if (CompactionType.IUD_UPDDEL_DELTA == compactionType) {
@@ -216,7 +217,7 @@ case class CarbonAlterTableCompactionCommand(
}
}
- LOGGER.audit(s"Compaction request received for table " +
+ Audit.log(LOGGER, s"Compaction request received for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
val carbonTable = carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable
@@ -313,7 +314,7 @@ case class CarbonAlterTableCompactionCommand(
throw e
}
} else {
- LOGGER.audit("Not able to acquire the compaction lock for table " +
+ Audit.log(LOGGER, "Not able to acquire the compaction lock for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
LOGGER.error(s"Not able to acquire the compaction lock for table" +
s" ${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
@@ -328,7 +329,7 @@ case class CarbonAlterTableCompactionCommand(
operationContext: OperationContext,
sparkSession: SparkSession
): Unit = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
val carbonTable = carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable
// 1. delete the lock of streaming.lock, forcing the stream to be closed
val streamingLock = CarbonLockFactory.getCarbonLockObj(
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala
index a477167..ba20773 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala
@@ -33,7 +33,7 @@ case class CarbonAlterTableFinishStreaming(
extends MetadataCommand {
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
val carbonTable = CarbonEnv.getCarbonTable(dbName, tableName)(sparkSession)
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val streamingLock = CarbonLockFactory.getCarbonLockObj(
carbonTable.getTableInfo().getOrCreateAbsoluteTableIdentifier(),
LockUsage.STREAMING_LOCK)
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala
index e561a5a..a390191 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala
@@ -147,7 +147,7 @@ case class CarbonCleanFilesCommand(
case e: Throwable =>
// catch all exceptions to avoid failure
LogServiceFactory.getLogService(this.getClass.getCanonicalName)
- .error(e, "Failed to clean in progress segments")
+ .error("Failed to clean in progress segments", e)
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
index 7cf8c1e..ee0f5ab 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
@@ -36,7 +36,7 @@ case class CarbonInsertIntoCommand(
var loadCommand: CarbonLoadDataCommand = _
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
def containsLimit(plan: LogicalPlan): Boolean = {
plan find {
case limit: GlobalLimit => true
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
index 43c8b86..22d0bb3 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
@@ -27,6 +27,7 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.conf.Configuration
+import org.apache.log4j.Logger
import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
import org.apache.spark.sql._
@@ -48,7 +49,8 @@ import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.{CarbonReflectionUtils, CausedBy, FileUtils}
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.converter.SparkDataTypeConverterImpl
import org.apache.carbondata.core.constants.{CarbonCommonConstants, CarbonLoadOptionConstants}
import org.apache.carbondata.core.datamap.DataMapStoreManager
@@ -109,7 +111,7 @@ case class CarbonLoadDataCommand(
var parentTablePath: String = _
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val dbName = CarbonEnv.getDatabaseName(databaseNameOp)(sparkSession)
table = if (tableInfoOp.isDefined) {
CarbonTable.buildFromTableInfo(tableInfoOp.get)
@@ -121,7 +123,7 @@ case class CarbonLoadDataCommand(
}
if (null == relation.carbonTable) {
LOGGER.error(s"Data loading failed. table not found: $dbName.$tableName")
- LOGGER.audit(s"Data loading failed. table not found: $dbName.$tableName")
+ Audit.log(LOGGER, s"Data loading failed. table not found: $dbName.$tableName")
throw new NoSuchTableException(dbName, tableName)
}
relation.carbonTable
@@ -150,7 +152,7 @@ case class CarbonLoadDataCommand(
}
override def processData(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val carbonProperty: CarbonProperties = CarbonProperties.getInstance()
var concurrentLoadLock: Option[ICarbonLock] = None
carbonProperty.addProperty("zookeeper.enable.lock", "false")
@@ -341,7 +343,7 @@ case class CarbonLoadDataCommand(
if (isUpdateTableStatusRequired) {
CarbonLoaderUtil.updateTableStatusForFailure(carbonLoadModel, uuid)
}
- LOGGER.error(ex, s"Dataload failure for $dbName.$tableName")
+ LOGGER.error(s"Dataload failure for $dbName.$tableName", ex)
throw new RuntimeException(s"Dataload failure for $dbName.$tableName, ${ex.getMessage}")
// In case of event related exception
case preEventEx: PreEventException =>
@@ -352,7 +354,7 @@ case class CarbonLoadDataCommand(
if (isUpdateTableStatusRequired) {
CarbonLoaderUtil.updateTableStatusForFailure(carbonLoadModel, uuid)
}
- LOGGER.audit(s"Dataload failure for $dbName.$tableName. Please check the logs")
+ Audit.log(LOGGER, s"Dataload failure for $dbName.$tableName. Please check the logs")
throw ex
} finally {
releaseConcurrentLoadLock(concurrentLoadLock, LOGGER)
@@ -369,7 +371,7 @@ case class CarbonLoadDataCommand(
} catch {
case ex: Exception =>
LOGGER.error(ex)
- LOGGER.audit(s"Dataload failure for $dbName.$tableName. " +
+ Audit.log(LOGGER, s"Dataload failure for $dbName.$tableName. " +
"Problem deleting the partition folder")
throw ex
}
@@ -377,10 +379,10 @@ case class CarbonLoadDataCommand(
}
} catch {
case dle: DataLoadingException =>
- LOGGER.audit(s"Dataload failed for $dbName.$tableName. " + dle.getMessage)
+ Audit.log(LOGGER, s"Dataload failed for $dbName.$tableName. " + dle.getMessage)
throw dle
case mce: MalformedCarbonCommandException =>
- LOGGER.audit(s"Dataload failed for $dbName.$tableName. " + mce.getMessage)
+ Audit.log(LOGGER, s"Dataload failed for $dbName.$tableName. " + mce.getMessage)
throw mce
}
Seq.empty
@@ -412,7 +414,7 @@ case class CarbonLoadDataCommand(
}
private def releaseConcurrentLoadLock(concurrentLoadLock: Option[ICarbonLock],
- LOGGER: LogService): Unit = {
+ LOGGER: Logger): Unit = {
if (concurrentLoadLock.isDefined) {
if (concurrentLoadLock.get.unlock()) {
LOGGER.info("concurrent_load lock for table" + table.getTablePath +
@@ -432,7 +434,7 @@ case class CarbonLoadDataCommand(
partitionStatus: SegmentStatus,
hadoopConf: Configuration,
operationContext: OperationContext,
- LOGGER: LogService): Seq[Row] = {
+ LOGGER: Logger): Seq[Row] = {
var rows = Seq.empty[Row]
val carbonTable = carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable
val carbonTableIdentifier = carbonTable.getAbsoluteTableIdentifier
@@ -561,7 +563,7 @@ case class CarbonLoadDataCommand(
partitionStatus: SegmentStatus,
hadoopConf: Configuration,
operationContext: OperationContext,
- LOGGER: LogService): Seq[Row] = {
+ LOGGER: Logger): Seq[Row] = {
var rows = Seq.empty[Row]
val (dictionaryDataFrame, loadDataFrame) = if (updateModel.isDefined) {
val dataFrameWithTupleId: DataFrame = getDataFrameWithTupleID()
@@ -615,9 +617,8 @@ case class CarbonLoadDataCommand(
hadoopConf: Configuration,
dataFrame: Option[DataFrame],
operationContext: OperationContext,
- LOGGER: LogService): Seq[Row] = {
+ LOGGER: Logger): Seq[Row] = {
val table = carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable
- val identifier = TableIdentifier(table.getTableName, Some(table.getDatabaseName))
val catalogTable: CatalogTable = logicalPartitionRelation.catalogTable.get
var timeStampformatString = carbonLoadModel.getTimestampformat
if (timeStampformatString.isEmpty) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
index cf88fb9..39e85ba 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
@@ -27,7 +27,8 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.execution.command.{AlterTableAddPartitionCommand, MetadataCommand}
import org.apache.spark.sql.execution.command.table.CarbonCreateTableCommand
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.indexstore.PartitionSpec
@@ -47,8 +48,7 @@ case class RefreshCarbonTableCommand(
databaseNameOp: Option[String],
tableName: String)
extends MetadataCommand {
- val LOGGER: LogService =
- LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetastore
@@ -88,7 +88,7 @@ case class RefreshCarbonTableCommand(
val msg = s"Table registration with Database name [$databaseName] and Table name " +
s"[$tableName] failed. All the aggregate Tables for table [$tableName] is" +
s" not copied under database [$databaseName]"
- LOGGER.audit(msg)
+ Audit.log(LOGGER, msg)
throwMetadataException(databaseName, tableName, msg)
}
// 2.2.1 Register the aggregate tables to hive
@@ -101,14 +101,14 @@ case class RefreshCarbonTableCommand(
registerAllPartitionsToHive(identifier, sparkSession)
}
} else {
- LOGGER.audit(
+ Audit.log(LOGGER,
s"Table registration with Database name [$databaseName] and Table name [$tableName] " +
s"failed." +
s"Table [$tableName] either non carbon table or stale carbon table under database " +
s"[$databaseName]")
}
} else {
- LOGGER.audit(
+ Audit.log(LOGGER,
s"Table registration with Database name [$databaseName] and Table name [$tableName] " +
s"failed." +
s"Table [$tableName] either already exists or registered under database [$databaseName]")
@@ -154,7 +154,7 @@ case class RefreshCarbonTableCommand(
OperationListenerBus.getInstance.fireEvent(refreshTablePreExecutionEvent, operationContext)
CarbonCreateTableCommand(tableInfo, ifNotExistsSet = false, tableLocation = Some(tablePath))
.run(sparkSession)
- LOGGER.audit(s"Table registration with Database name [$dbName] and Table name " +
+ Audit.log(LOGGER, s"Table registration with Database name [$dbName] and Table name " +
s"[$tableName] is successful.")
} catch {
case e: AnalysisException => throw e
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala
index 0127d7e..053937b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala
@@ -21,8 +21,10 @@ import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.command._
+import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.exception.ConcurrentOperationException
import org.apache.carbondata.core.features.TableOperation
import org.apache.carbondata.core.locks.{CarbonLockFactory, CarbonLockUtil, LockUsage}
@@ -79,7 +81,7 @@ private[sql] case class CarbonProjectForDeleteCommand(
var lockStatus = false
try {
lockStatus = metadataLock.lockWithRetries()
- LOGGER.audit(s" Delete data request has been received " +
+ Audit.log(LOGGER, s" Delete data request has been received " +
s"for ${carbonTable.getDatabaseName}.${carbonTable.getTableName}.")
if (lockStatus) {
LOGGER.info("Successfully able to get the table metadata file lock")
@@ -119,7 +121,7 @@ private[sql] case class CarbonProjectForDeleteCommand(
CarbonUpdateUtil.cleanStaleDeltaFiles(carbonTable, e.compactionTimeStamp.toString)
case e: Exception =>
- LOGGER.error(e, "Exception in Delete data operation " + e.getMessage)
+ LOGGER.error("Exception in Delete data operation " + e.getMessage, e)
// ****** start clean up.
// In case of failure , clean all related delete delta files
CarbonUpdateUtil.cleanStaleDeltaFiles(carbonTable, timestamp)
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala
index 4e9c1af..31e1779 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala
@@ -163,7 +163,7 @@ private[sql] case class CarbonProjectForUpdateCommand(
CarbonUpdateUtil.cleanStaleDeltaFiles(carbonTable, e.compactionTimeStamp.toString)
case e: Exception =>
- LOGGER.error(e, "Exception in update operation")
+ LOGGER.error("Exception in update operation", e)
// ****** start clean up.
// In case of failure , clean all related delete delta files
CarbonUpdateUtil.cleanStaleDeltaFiles(carbonTable, currentTime + "")
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
index 7e7f671..d118539 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
@@ -20,9 +20,7 @@ package org.apache.spark.sql.execution.command.mutation
import java.util
import scala.collection.JavaConverters._
-import scala.reflect.ClassTag
-import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapreduce.Job
@@ -34,7 +32,9 @@ import org.apache.spark.sql.execution.command.ExecutionErrors
import org.apache.spark.sql.optimizer.CarbonFilters
import org.apache.spark.sql.util.SparkSQLUtil
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.api.CarbonStore.LOGGER
+import org.apache.carbondata.common.logging.impl.Audit
+import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.Segment
import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -46,13 +46,12 @@ import org.apache.carbondata.core.util.{CarbonUtil, ThreadLocalSessionInfo}
import org.apache.carbondata.core.util.path.CarbonTablePath
import org.apache.carbondata.core.writer.CarbonDeleteDeltaWriterImpl
import org.apache.carbondata.hadoop.api.{CarbonInputFormat, CarbonTableInputFormat}
-import org.apache.carbondata.hadoop.util.CarbonInputFormatUtil
import org.apache.carbondata.processing.exception.MultipleMatchingException
import org.apache.carbondata.processing.loading.FailureCauses
import org.apache.carbondata.spark.DeleteDelataResultImpl
object DeleteExecution {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
/**
* generate the delete delta files in each segment as per the RDD.
@@ -167,7 +166,7 @@ object DeleteExecution {
} else {
// In case of failure , clean all related delete delta files
CarbonUpdateUtil.cleanStaleDeltaFiles(carbonTable, timestamp)
- LOGGER.audit(s"Delete data operation is failed for ${ database }.${ tableName }")
+ Audit.log(LOGGER, s"Delete data operation is failed for ${ database }.${ tableName }")
val errorMsg =
"Delete data operation is failed due to failure in creating delete delta file for " +
"segment : " + resultOfBlock._2._1.getSegmentName + " block : " +
@@ -202,7 +201,7 @@ object DeleteExecution {
listOfSegmentToBeMarkedDeleted)
) {
LOGGER.info(s"Delete data operation is successful for ${ database }.${ tableName }")
- LOGGER.audit(s"Delete data operation is successful for ${ database }.${ tableName }")
+ Audit.log(LOGGER, s"Delete data operation is successful for ${ database }.${ tableName }")
}
else {
// In case of failure , clean all related delete delta files
@@ -210,7 +209,7 @@ object DeleteExecution {
val errorMessage = "Delete data operation is failed due to failure " +
"in table status updation."
- LOGGER.audit(s"Delete data operation is failed for ${ database }.${ tableName }")
+ Audit.log(LOGGER, s"Delete data operation is failed for ${ database }.${ tableName }")
LOGGER.error("Delete data operation is failed due to failure in table status updation.")
executorErrors.failureCauses = FailureCauses.STATUS_FILE_UPDATION_FAILURE
executorErrors.errorMsg = errorMessage
@@ -291,12 +290,12 @@ object DeleteExecution {
deleteStatus = SegmentStatus.SUCCESS
} catch {
case e : MultipleMatchingException =>
- LOGGER.audit(e.getMessage)
+ Audit.log(LOGGER, e.getMessage)
LOGGER.error(e.getMessage)
// dont throw exception here.
case e: Exception =>
val errorMsg = s"Delete data operation is failed for ${ database }.${ tableName }."
- LOGGER.audit(errorMsg)
+ Audit.log(LOGGER, errorMsg)
LOGGER.error(errorMsg + e.getMessage)
throw e
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala
index 35fc3c3..3472d8a 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala
@@ -25,21 +25,20 @@ import scala.collection.mutable.ListBuffer
import org.apache.spark.sql._
import org.apache.spark.sql.execution.command.AlterTableModel
import org.apache.spark.sql.execution.command.management.CarbonAlterTableCompactionCommand
-import org.apache.spark.sql.hive.CarbonRelation
import org.apache.spark.sql.util.SparkSQLUtil
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.datamap.Segment
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager
import org.apache.carbondata.core.util.ThreadLocalSessionInfo
-import org.apache.carbondata.hadoop.util.CarbonInputFormatUtil
import org.apache.carbondata.processing.merger.{CarbonDataMergerUtil, CarbonDataMergerUtilResult, CompactionType}
object HorizontalCompaction {
- val LOG: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOG = LogServiceFactory.getLogService(this.getClass.getName)
/**
* The method does horizontal compaction. After Update and Delete completion
@@ -131,7 +130,7 @@ object HorizontalCompaction {
}
LOG.info(s"Horizontal Update Compaction operation started for [$db.$table].")
- LOG.audit(s"Horizontal Update Compaction operation started for [$db.$table].")
+ Audit.log(LOG, s"Horizontal Update Compaction operation started for [$db.$table].")
try {
// Update Compaction.
@@ -155,7 +154,7 @@ object HorizontalCompaction {
s"Horizontal Update Compaction Failed for [${ db }.${ table }]. " + msg, factTimeStamp)
}
LOG.info(s"Horizontal Update Compaction operation completed for [${ db }.${ table }].")
- LOG.audit(s"Horizontal Update Compaction operation completed for [${ db }.${ table }].")
+ Audit.log(LOG, s"Horizontal Update Compaction operation completed for [${ db }.${ table }].")
}
/**
@@ -181,7 +180,7 @@ object HorizontalCompaction {
}
LOG.info(s"Horizontal Delete Compaction operation started for [$db.$table].")
- LOG.audit(s"Horizontal Delete Compaction operation started for [$db.$table].")
+ Audit.log(LOG, s"Horizontal Delete Compaction operation started for [$db.$table].")
try {
@@ -226,7 +225,7 @@ object HorizontalCompaction {
timestamp.toString,
segmentUpdateStatusManager)
if (updateStatus == false) {
- LOG.audit(s"Delete Compaction data operation is failed for [$db.$table].")
+ Audit.log(LOG, s"Delete Compaction data operation is failed for [$db.$table].")
LOG.error("Delete Compaction data operation is failed.")
throw new HorizontalCompactionException(
s"Horizontal Delete Compaction Failed for [$db.$table] ." +
@@ -234,7 +233,7 @@ object HorizontalCompaction {
}
else {
LOG.info(s"Horizontal Delete Compaction operation completed for [$db.$table].")
- LOG.audit(s"Horizontal Delete Compaction operation completed for [$db.$table].")
+ Audit.log(LOG, s"Horizontal Delete Compaction operation completed for [$db.$table].")
}
}
catch {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala
index b76a485..c230322 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala
@@ -28,13 +28,14 @@ import org.apache.spark.sql.execution.command._
import org.apache.spark.sql.hive.CarbonRelation
import org.apache.spark.util.AlterTableUtil
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.cache.CacheProvider
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.{DataMapStoreManager, Segment}
import org.apache.carbondata.core.datastore.compression.CompressorFactory
import org.apache.carbondata.core.locks.{ICarbonLock, LockUsage}
-import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonMetadata}
+import org.apache.carbondata.core.metadata.CarbonMetadata
import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
import org.apache.carbondata.core.metadata.schema.partition.PartitionType
import org.apache.carbondata.core.mutate.CarbonUpdateUtil
@@ -48,7 +49,7 @@ case class CarbonAlterTableDropPartitionCommand(
model: AlterTableDropPartitionModel)
extends AtomicRunnableCommand {
- private val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ private val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
private val oldPartitionIds: util.ArrayList[Int] = new util.ArrayList[Int]()
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
@@ -121,7 +122,7 @@ case class CarbonAlterTableDropPartitionCommand(
}
override def processData(sparkSession: SparkSession): Seq[Row] = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
val dbName = model.databaseName.getOrElse(sparkSession.catalog.currentDatabase)
val tableName = model.tableName
var locks = List.empty[ICarbonLock]
@@ -168,7 +169,7 @@ case class CarbonAlterTableDropPartitionCommand(
LOGGER.info("Locks released after alter table drop partition action.")
}
LOGGER.info(s"Alter table drop partition is successful for table $dbName.$tableName")
- LOGGER.audit(s"Alter table drop partition is successful for table $dbName.$tableName")
+ Audit.log(LOGGER, s"Alter table drop partition is successful for table $dbName.$tableName")
Seq.empty
}
@@ -177,7 +178,7 @@ case class CarbonAlterTableDropPartitionCommand(
carbonLoadModel: CarbonLoadModel,
dropWithData: Boolean,
oldPartitionIds: List[Int]): Unit = {
- LOGGER.audit(s"Drop partition request received for table " +
+ Audit.log(LOGGER, s"Drop partition request received for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
try {
startDropThreads(
@@ -246,7 +247,7 @@ case class dropPartitionThread(sqlContext: SQLContext,
dropWithData: Boolean,
oldPartitionIds: List[Int]) extends Thread {
- private val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ private val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
override def run(): Unit = {
try {
@@ -254,8 +255,8 @@ case class dropPartitionThread(sqlContext: SQLContext,
segmentId, partitionId, dropWithData, oldPartitionIds)
} catch {
case e: Exception =>
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
- LOGGER.error(s"Exception in dropping partition thread: ${ e.getMessage } }")
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
+ LOGGER.error(s"Exception in dropping partition thread: ${ e.getMessage } }", e)
}
}
@@ -274,7 +275,7 @@ case class dropPartitionThread(sqlContext: SQLContext,
future.get
} catch {
case e: Exception =>
- LOGGER.error(e, s"Exception in partition drop thread ${ e.getMessage }")
+ LOGGER.error(s"Exception in partition drop thread ${ e.getMessage }", e)
throw e
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala
index 753abaf..8b337c6 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala
@@ -29,7 +29,8 @@ import org.apache.spark.sql.execution.command._
import org.apache.spark.sql.hive.CarbonRelation
import org.apache.spark.util.{AlterTableUtil, PartitionUtils}
-import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.common.logging.impl.Audit
import org.apache.carbondata.core.cache.CacheProvider
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.DataMapStoreManager
@@ -54,7 +55,7 @@ case class CarbonAlterTableSplitPartitionCommand(
splitPartitionModel: AlterTableSplitPartitionModel)
extends AtomicRunnableCommand {
- private val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ private val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
private val oldPartitionIds: util.ArrayList[Int] = new util.ArrayList[Int]()
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
@@ -182,11 +183,12 @@ case class CarbonAlterTableSplitPartitionCommand(
} finally {
AlterTableUtil.releaseLocks(locks)
CacheProvider.getInstance().dropAllCache()
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
LOGGER.info("Locks released after alter table add/split partition action.")
if (success) {
LOGGER.info(s"Alter table add/split partition is successful for table $dbName.$tableName")
- LOGGER.audit(s"Alter table add/split partition is successful for table $dbName.$tableName")
+ Audit.log(LOGGER,
+ s"Alter table add/split partition is successful for table $dbName.$tableName")
}
}
Seq.empty
@@ -198,7 +200,7 @@ case class CarbonAlterTableSplitPartitionCommand(
carbonLoadModel: CarbonLoadModel,
oldPartitionIdList: List[Int]
): Unit = {
- LOGGER.audit(s"Add partition request received for table " +
+ Audit.log(LOGGER, s"Add partition request received for table " +
s"${ carbonLoadModel.getDatabaseName }.${ carbonLoadModel.getTableName }")
try {
startSplitThreads(sqlContext,
@@ -264,7 +266,7 @@ case class SplitThread(sqlContext: SQLContext,
partitionId: String,
oldPartitionIdList: List[Int]) extends Thread {
- private val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ private val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
override def run(): Unit = {
var triggeredSplitPartitionStatus = false
@@ -275,7 +277,7 @@ case class SplitThread(sqlContext: SQLContext,
triggeredSplitPartitionStatus = true
} catch {
case e: Exception =>
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
LOGGER.error(s"Exception in partition split thread: ${ e.getMessage } }")
exception = e
}
@@ -301,7 +303,7 @@ case class SplitThread(sqlContext: SQLContext,
}
} catch {
case e: Exception =>
- LOGGER.error(e, s"Exception in partition split thread ${ e.getMessage }")
+ LOGGER.error(s"Exception in partition split thread ${ e.getMessage }", e)
throw e
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
index b33652f..f606c04 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
@@ -64,7 +64,7 @@ object AlterTableDropPartitionPreStatusListener extends OperationEventListener {
trait CommitHelper {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
protected def markInProgressSegmentAsDeleted(tableStatusFile: String,
operationContext: OperationContext,
@@ -586,7 +586,7 @@ object CommitPreAggregateListener extends OperationEventListener with CommitHelp
} catch {
case e: Exception =>
operationContext.setProperty("commitComplete", false)
- LOGGER.error(e, "Problem while committing data maps")
+ LOGGER.error("Problem while committing data maps", e)
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
index f26d1cb..d16f570 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
@@ -419,11 +419,10 @@ object PreAggregateUtil {
*/
def updateMainTable(carbonTable: CarbonTable,
childSchema: DataMapSchema, sparkSession: SparkSession): TableInfo = {
- val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+ val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
val locksToBeAcquired = List(LockUsage.METADATA_LOCK,
LockUsage.DROP_TABLE_LOCK)
var locks = List.empty[ICarbonLock]
- var numberOfCurrentChild: Int = 0
val dbName = carbonTable.getDatabaseName
val tableName = carbonTable.getTableName
try {
@@ -450,7 +449,7 @@ object PreAggregateUtil {
thriftTableInfo
} catch {
case e: Exception =>
- LOGGER.error(e, "Pre Aggregate Parent table update failed reverting changes")
+ LOGGER.error("Pre Aggregate Parent table update failed reverting changes", e)
throw e
} finally {
// release lock after command execution completion
[5/6] carbondata git commit: [CARBONDATA-3024] Refactor to use log4j
Logger directly
Posted by xu...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServerHandler.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServerHandler.java b/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServerHandler.java
index 82efe80..0f076a4 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServerHandler.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/server/NonSecureDictionaryServerHandler.java
@@ -16,7 +16,6 @@
*/
package org.apache.carbondata.core.dictionary.server;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.dictionary.generator.ServerDictionaryGenerator;
import org.apache.carbondata.core.dictionary.generator.key.DictionaryMessage;
@@ -26,6 +25,7 @@ import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
+import org.apache.log4j.Logger;
/**
* Handler for Dictionary server.
@@ -33,7 +33,7 @@ import io.netty.channel.ChannelInboundHandlerAdapter;
@ChannelHandler.Sharable public class NonSecureDictionaryServerHandler
extends ChannelInboundHandlerAdapter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(NonSecureDictionaryServerHandler.class.getName());
/**
@@ -77,7 +77,7 @@ import io.netty.channel.ChannelInboundHandlerAdapter;
* @param cause
*/
@Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
- LOGGER.error(cause, "exceptionCaught");
+ LOGGER.error("exceptionCaught", cause);
ctx.close();
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/dictionary/service/AbstractDictionaryServer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/service/AbstractDictionaryServer.java b/core/src/main/java/org/apache/carbondata/core/dictionary/service/AbstractDictionaryServer.java
index 754f253..5703051 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/service/AbstractDictionaryServer.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/service/AbstractDictionaryServer.java
@@ -27,13 +27,12 @@ import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
-
import org.apache.commons.lang3.SystemUtils;
+import org.apache.log4j.Logger;
public abstract class AbstractDictionaryServer {
- public String findLocalIpAddress(LogService LOGGER) {
+ public String findLocalIpAddress(Logger LOGGER) {
try {
String defaultIpOverride = System.getenv("SPARK_LOCAL_IP");
if (defaultIpOverride != null) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/fileoperations/AtomicFileOperationsImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/fileoperations/AtomicFileOperationsImpl.java b/core/src/main/java/org/apache/carbondata/core/fileoperations/AtomicFileOperationsImpl.java
index f9f8647..13f10d7 100644
--- a/core/src/main/java/org/apache/carbondata/core/fileoperations/AtomicFileOperationsImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/fileoperations/AtomicFileOperationsImpl.java
@@ -21,7 +21,6 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
@@ -29,12 +28,14 @@ import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.datastore.impl.FileFactory.FileType;
import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.log4j.Logger;
+
class AtomicFileOperationsImpl implements AtomicFileOperations {
/**
* Logger instance
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(AtomicFileOperationsImpl.class.getName());
private String filePath;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
index ed709c5..5f1bca4 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
@@ -25,7 +25,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CarbonLRUCache;
@@ -41,6 +40,7 @@ import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.util.BlockletDataMapUtil;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* Class to handle loading, unloading,clearing,storing of the table
@@ -48,7 +48,7 @@ import org.apache.hadoop.conf.Configuration;
*/
public class BlockletDataMapIndexStore
implements Cache<TableBlockIndexUniqueIdentifierWrapper, BlockletDataMapIndexWrapper> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(BlockletDataMapIndexStore.class.getName());
/**
* CarbonLRU cache
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailInfo.java b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailInfo.java
index 973a240..9ce932c 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailInfo.java
@@ -24,13 +24,13 @@ import java.io.IOException;
import java.io.Serializable;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
import org.apache.carbondata.core.util.BlockletDataMapUtil;
import org.apache.hadoop.io.Writable;
+import org.apache.log4j.Logger;
/**
* Blocklet detail information to be sent to each executor
@@ -40,7 +40,7 @@ public class BlockletDetailInfo implements Serializable, Writable {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(BlockletDetailInfo.class.getName());
private static final long serialVersionUID = 7957493757421513808L;
@@ -128,13 +128,13 @@ public class BlockletDetailInfo implements Serializable, Writable {
try {
blockletInfo.readFields(inputStream);
} catch (IOException e) {
- LOGGER.error("Problem in reading blocklet info");
+ LOGGER.error("Problem in reading blocklet info", e);
throw new IOException("Problem in reading blocklet info." + e.getMessage());
} finally {
try {
inputStream.close();
} catch (IOException e) {
- LOGGER.error(e, "Problem in closing input stream of reading blocklet info.");
+ LOGGER.error("Problem in closing input stream of reading blocklet info.", e);
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java
index d7b7977..5a25bc5 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockDataMap.java
@@ -16,11 +16,15 @@
*/
package org.apache.carbondata.core.indexstore.blockletindex;
-import java.io.*;
+import java.io.IOException;
+import java.io.Serializable;
+import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.dev.DataMapModel;
@@ -66,6 +70,7 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
/**
* Datamap implementation for block.
@@ -73,7 +78,7 @@ import org.apache.hadoop.fs.Path;
public class BlockDataMap extends CoarseGrainDataMap
implements BlockletDataMapRowIndexes, Serializable {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(BlockDataMap.class.getName());
protected static final long serialVersionUID = -2170289352240810993L;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
index f19c9c9..909d79f 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
@@ -27,7 +27,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
@@ -49,6 +48,7 @@ import org.apache.carbondata.format.MergedBlockIndex;
import org.apache.carbondata.format.MergedBlockIndexHeader;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
import org.apache.thrift.TBase;
/**
@@ -60,7 +60,7 @@ public class SegmentIndexFileStore {
/**
* Logger constant
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SegmentIndexFileStore.class.getName());
/**
* Stores the indexfile name and related binary file data in it.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
index 1caa3e4..a49eced 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
@@ -21,13 +21,14 @@ import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.log4j.Logger;
+
/**
* The class provides the method to generate dictionary key and getting the actual value from
* the dictionaryKey for direct dictionary column for TIMESTAMP type.
@@ -53,7 +54,7 @@ public class DateDirectDictionaryGenerator implements DirectDictionaryGenerator
/**
* Logger instance
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DateDirectDictionaryGenerator.class.getName());
static {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
index 72ed66c..1e5a1f1 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
@@ -20,7 +20,6 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
@@ -33,6 +32,8 @@ import static org.apache.carbondata.core.keygenerator.directdictionary.timestamp
import static org.apache.carbondata.core.keygenerator.directdictionary.timestamp.TimeStampGranularityConstants.TIME_GRAN_MIN;
import static org.apache.carbondata.core.keygenerator.directdictionary.timestamp.TimeStampGranularityConstants.TIME_GRAN_SEC;
+import org.apache.log4j.Logger;
+
/**
* The class provides the method to generate dictionary key and getting the actual value from
* the dictionaryKey for direct dictionary column for TIMESTAMP type.
@@ -56,7 +57,7 @@ public class TimeStampDirectDictionaryGenerator implements DirectDictionaryGener
/**
* Logger instance
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(TimeStampDirectDictionaryGenerator.class.getName());
/*
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
index 79bad6c..b98ebb7 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
@@ -17,12 +17,13 @@
package org.apache.carbondata.core.locks;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.log4j.Logger;
+
/**
* This class is a Lock factory class which is used to provide lock objects.
* Using this lock object client can request the lock and unlock.
@@ -32,7 +33,7 @@ public class CarbonLockFactory {
/**
* Attribute for LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonLockFactory.class.getName());
/**
* lockTypeConfigured to check if zookeeper feature is enabled or not for carbon.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
index ca6cddb..3d86587 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
@@ -17,7 +17,6 @@
package org.apache.carbondata.core.locks;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
@@ -29,13 +28,14 @@ import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
/**
* This class contains all carbon lock utilities
*/
public class CarbonLockUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonLockUtil.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java
index bc65ece..b7b2c97 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java
@@ -20,18 +20,19 @@ package org.apache.carbondata.core.locks;
import java.io.DataOutputStream;
import java.io.IOException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.log4j.Logger;
+
/**
* This class is used to handle the HDFS File locking.
* This is achieved using the concept of acquiring the data out stream using Append option.
*/
public class HdfsFileLock extends AbstractCarbonLock {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(HdfsFileLock.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
index 5e3033e..35b4a1d 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
@@ -24,12 +24,13 @@ import java.nio.channels.OverlappingFileLockException;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.log4j.Logger;
+
/**
* This class handles the file locking in the local file system.
* This will be handled using the file channel lock API.
@@ -53,7 +54,7 @@ public class LocalFileLock extends AbstractCarbonLock {
/**
* LOGGER for logging the messages.
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LocalFileLock.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java
index 10bab28..e9d9aed 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java
@@ -20,19 +20,20 @@ package org.apache.carbondata.core.locks;
import java.io.DataOutputStream;
import java.io.IOException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.log4j.Logger;
+
/**
* This class is used to handle the S3 File locking.
* This is achieved using the concept of acquiring the data out stream using Append option.
*/
public class S3FileLock extends AbstractCarbonLock {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(S3FileLock.class.getName());
/**
@@ -93,7 +94,7 @@ public class S3FileLock extends AbstractCarbonLock {
FileFactory.getFileType(lockFilePath));
return true;
} catch (IOException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
return false;
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java b/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
index 5a055ab..de8d6a6 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
@@ -20,13 +20,13 @@ package org.apache.carbondata.core.locks;
import java.util.Collections;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.log4j.Logger;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs.Ids;
@@ -37,7 +37,7 @@ import org.apache.zookeeper.ZooKeeper;
*/
public class ZooKeeperLocking extends AbstractCarbonLock {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ZooKeeperLocking.class.getName());
/**
@@ -106,7 +106,7 @@ public class ZooKeeperLocking extends AbstractCarbonLock {
zk.create(this.lockTypeFolder, new byte[1], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
}
} catch (KeeperException | InterruptedException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
}
initRetry();
}
@@ -167,7 +167,7 @@ public class ZooKeeperLocking extends AbstractCarbonLock {
return false;
}
} catch (KeeperException | InterruptedException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
return false;
}
}
@@ -183,7 +183,7 @@ public class ZooKeeperLocking extends AbstractCarbonLock {
lockPath = null;
}
} catch (KeeperException | InterruptedException e) {
- LOGGER.error(e, e.getMessage());
+ LOGGER.error(e.getMessage(), e);
return false;
}
return true;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java b/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
index 102ea4c..5e59593 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
@@ -19,9 +19,9 @@ package org.apache.carbondata.core.locks;
import java.io.IOException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.log4j.Logger;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;
@@ -31,7 +31,7 @@ import org.apache.zookeeper.ZooKeeper;
*/
public class ZookeeperInit {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ZookeeperInit.class.getName());
private static ZookeeperInit zooKeeperInit;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/memory/IntPointerBuffer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/IntPointerBuffer.java b/core/src/main/java/org/apache/carbondata/core/memory/IntPointerBuffer.java
index 58d873c..c596b08 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/IntPointerBuffer.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/IntPointerBuffer.java
@@ -17,15 +17,16 @@
package org.apache.carbondata.core.memory;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.log4j.Logger;
+
/**
* Holds the pointers for rows.
*/
public class IntPointerBuffer {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(IntPointerBuffer.class.getName());
private int length;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
index 048c058..6a69dfd 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
@@ -23,17 +23,18 @@ import java.util.Iterator;
import java.util.Map;
import java.util.Set;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.log4j.Logger;
+
/**
* Manages memory for instance.
*/
public class UnsafeMemoryManager {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeMemoryManager.class.getName());
private static boolean offHeap = Boolean.parseBoolean(CarbonProperties.getInstance()
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java
index 67b8d43..8dcf915 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java
@@ -22,11 +22,12 @@ import java.util.Iterator;
import java.util.Map;
import java.util.Set;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.log4j.Logger;
+
/**
* Memory manager to keep track of
* all memory for storing the sorted data
@@ -36,7 +37,7 @@ public class UnsafeSortMemoryManager {
/**
* logger
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeSortMemoryManager.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
index 44a2f7e..1e1e303 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
@@ -20,7 +20,6 @@ import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.DataMapStoreManager;
@@ -51,13 +50,14 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
import com.google.gson.Gson;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
/**
* Provide read and write support for segment file associated with each segment
*/
public class SegmentFileStore {
- private static LogService LOGGER = LogServiceFactory.getLogService(
+ private static final Logger LOGGER = LogServiceFactory.getLogService(
SegmentFileStore.class.getCanonicalName());
private SegmentFile segmentFile;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index e57605a..2f68754 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -28,7 +28,6 @@ import java.util.List;
import java.util.Map;
import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.DataMapStoreManager;
@@ -64,13 +63,14 @@ import static org.apache.carbondata.core.metadata.schema.datamap.DataMapClassPro
import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* Mapping class for Carbon actual table
*/
public class CarbonTable implements Serializable {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonTable.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
index 38145e5..b3e9e7e 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
@@ -30,7 +30,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
@@ -38,13 +37,15 @@ import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
import org.apache.carbondata.core.metadata.schema.table.column.ParentColumnTableRelation;
+import org.apache.log4j.Logger;
+
/**
* Store the information about the table.
* it stores the fact table as well as aggregate table present in the schema
*/
public class TableInfo implements Serializable, Writable {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(TableInfo.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java b/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
index d52eeb2..3924c0d 100644
--- a/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
@@ -26,7 +26,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
@@ -48,13 +47,14 @@ import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
/**
* This class contains all update utility methods
*/
public class CarbonUpdateUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonUpdateUtil.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/mutate/DeleteDeltaBlockDetails.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/mutate/DeleteDeltaBlockDetails.java b/core/src/main/java/org/apache/carbondata/core/mutate/DeleteDeltaBlockDetails.java
index b0a0139..045f0e7 100644
--- a/core/src/main/java/org/apache/carbondata/core/mutate/DeleteDeltaBlockDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/mutate/DeleteDeltaBlockDetails.java
@@ -25,9 +25,10 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.log4j.Logger;
+
/**
* This class stores the block details of delete delta file
*/
@@ -41,7 +42,7 @@ public class DeleteDeltaBlockDetails implements Serializable {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DeleteDeltaBlockDetails.class.getName());
public DeleteDeltaBlockDetails(String blockName) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java b/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java
index 583e2ec..a6fbb4f 100644
--- a/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java
@@ -19,10 +19,11 @@ package org.apache.carbondata.core.mutate;
import java.io.Serializable;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.statusmanager.SegmentStatus;
+import org.apache.log4j.Logger;
+
/**
* This class stores the segment details of table update status file
*/
@@ -40,7 +41,7 @@ public class SegmentUpdateDetails implements Serializable {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SegmentUpdateDetails.class.getName());
public String getDeleteDeltaEndTimestamp() {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
index b2260f0..49dec4c 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
@@ -23,8 +23,6 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringWriter;
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.fileoperations.AtomicFileOperationFactory;
@@ -39,12 +37,6 @@ import com.google.gson.Gson;
*/
public class CarbonDeleteDeltaFileReaderImpl implements CarbonDeleteDeltaFileReader {
- /**
- * LOGGER
- */
- private static final LogService LOGGER =
- LogServiceFactory.getLogService(CarbonDeleteDeltaFileReaderImpl.class.getName());
-
private String filePath;
private FileFactory.FileType fileType;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
index cc6e53f..32eb60d 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
@@ -30,7 +30,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -39,6 +38,7 @@ import org.apache.carbondata.core.mutate.DeleteDeltaBlockletDetails;
import org.apache.carbondata.core.mutate.DeleteDeltaVo;
import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.log4j.Logger;
/**
* This class perform the functionality of reading multiple delete delta files
@@ -48,7 +48,7 @@ public class CarbonDeleteFilesDataReader {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonDeleteFilesDataReader.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
index 439a93e..ef40c16 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
@@ -19,7 +19,6 @@ package org.apache.carbondata.core.reader.sortindex;
import java.io.IOException;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -29,6 +28,7 @@ import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReaderImpl;
import org.apache.carbondata.core.reader.ThriftReader;
import org.apache.carbondata.format.ColumnSortInfo;
+import org.apache.log4j.Logger;
import org.apache.thrift.TBase;
/**
@@ -54,7 +54,7 @@ public class CarbonDictionarySortIndexReaderImpl implements CarbonDictionarySort
/**
* Comment for <code>LOGGER</code>
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonDictionarySortIndexReaderImpl.class.getName());
/**
@@ -97,7 +97,7 @@ public class CarbonDictionarySortIndexReaderImpl implements CarbonDictionarySort
try {
columnSortInfo = (ColumnSortInfo) dictionarySortIndexThriftReader.read();
} catch (IOException ie) {
- LOGGER.error(ie, "problem while reading the column sort info.");
+ LOGGER.error("problem while reading the column sort info.", ie);
throw new IOException("problem while reading the column sort info.", ie);
} finally {
if (null != dictionarySortIndexThriftReader) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java
index e0a0b90..68f8ae6 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java
@@ -16,11 +16,21 @@
*/
package org.apache.carbondata.core.scan.collector;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.scan.collector.impl.*;
+import org.apache.carbondata.core.scan.collector.impl.AbstractScannedResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.DictionaryBasedResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.DictionaryBasedVectorResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.RawBasedResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.RestructureBasedDictionaryResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.RestructureBasedRawResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.RestructureBasedVectorResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.RowIdBasedResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.RowIdRawBasedResultCollector;
+import org.apache.carbondata.core.scan.collector.impl.RowIdRestructureBasedRawResultCollector;
import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
+import org.apache.log4j.Logger;
+
/**
* This class will provide the result collector instance based on the required type
*/
@@ -29,7 +39,7 @@ public class ResultCollectorFactory {
/**
* logger of result collector factory
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(ResultCollectorFactory.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
index 33f0db7..ed8d48e 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
@@ -19,8 +19,6 @@ package org.apache.carbondata.core.scan.collector.impl;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
import org.apache.carbondata.core.keygenerator.KeyGenException;
@@ -47,12 +45,6 @@ import org.apache.commons.lang3.ArrayUtils;
public class RestructureBasedRawResultCollector extends RawBasedResultCollector {
/**
- * logger
- */
- private static final LogService LOGGER =
- LogServiceFactory.getLogService(RestructureBasedRawResultCollector.class.getName());
-
- /**
* Key generator which will form the mdKey according to latest schema
*/
private KeyGenerator restructuredKeyGenerator;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
index a419c02..6a6a929 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
@@ -30,9 +30,7 @@ import java.util.Set;
import java.util.concurrent.ExecutorService;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.common.logging.impl.StandardLogService;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants;
import org.apache.carbondata.core.datamap.Segment;
@@ -78,6 +76,7 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* This class provides a skeletal implementation of the {@link QueryExecutor}
@@ -86,7 +85,7 @@ import org.apache.hadoop.conf.Configuration;
*/
public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(AbstractQueryExecutor.class.getName());
/**
* holder for query properties which will be used to execute the query
@@ -119,9 +118,6 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
* @param queryModel
*/
protected void initQuery(QueryModel queryModel) throws IOException {
- StandardLogService.setThreadName(StandardLogService.getPartitionID(
- queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableName()),
- queryModel.getQueryId());
LOGGER.info("Query will be executed on table: " + queryModel.getAbsoluteTableIdentifier()
.getCarbonTableIdentifier().getTableName());
this.freeUnsafeMemory = queryModel.isFreeUnsafeMemory();
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
index 6d03540..fe91442 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
@@ -22,7 +22,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.scan.executor.exception.QueryExecutionException;
@@ -32,9 +31,10 @@ import org.apache.carbondata.core.scan.result.iterator.SearchModeResultIterator;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
public class SearchModeDetailQueryExecutor extends AbstractQueryExecutor<Object> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SearchModeDetailQueryExecutor.class.getName());
private static ExecutorService executorService = null;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
index 418ef42..dd5f364 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
@@ -22,7 +22,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.scan.executor.exception.QueryExecutionException;
import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
@@ -33,12 +32,13 @@ import org.apache.carbondata.core.util.CarbonProperties;
import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_SEARCH_MODE_SCAN_THREAD;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* Below class will be used to execute the detail query and returns columnar vectors.
*/
public class SearchModeVectorDetailQueryExecutor extends AbstractQueryExecutor<Object> {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(SearchModeVectorDetailQueryExecutor.class.getName());
private static ExecutorService executorService = null;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java
index 585bf60..c47d5ff 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java
@@ -23,7 +23,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.metadata.datatype.DataType;
@@ -43,8 +42,10 @@ import static org.apache.carbondata.core.scan.filter.intf.ExpressionType.GREATER
import static org.apache.carbondata.core.scan.filter.intf.ExpressionType.LESSTHAN;
import static org.apache.carbondata.core.scan.filter.intf.ExpressionType.LESSTHAN_EQUALTO;
+import org.apache.log4j.Logger;
+
public class RangeExpressionEvaluator {
- private static final LogService LOG =
+ private static final Logger LOG =
LogServiceFactory.getLogService(RangeExpressionEvaluator.class.getName());
private Expression expr;
private Expression srcNode;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
index fcb374f..7269304 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
@@ -20,7 +20,6 @@ import java.io.IOException;
import java.util.BitSet;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.DataRefNode;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
@@ -64,9 +63,11 @@ import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.TrueConditio
import org.apache.carbondata.core.scan.partition.PartitionUtil;
import org.apache.carbondata.core.scan.partition.Partitioner;
+import org.apache.log4j.Logger;
+
public class FilterExpressionProcessor implements FilterProcessor {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(FilterExpressionProcessor.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index fe92c42..06672f5 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -36,7 +36,6 @@ import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
@@ -113,10 +112,11 @@ import org.apache.carbondata.core.util.comparator.Comparator;
import org.apache.carbondata.core.util.comparator.SerializableComparator;
import org.apache.commons.lang.ArrayUtils;
+import org.apache.log4j.Logger;
import org.roaringbitmap.RoaringBitmap;
public final class FilterUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(FilterUtil.class.getName());
private FilterUtil() {
@@ -1888,7 +1888,7 @@ public final class FilterUtil {
*/
public static void logError(Throwable e, boolean invalidRowsPresent) {
if (!invalidRowsPresent) {
- LOGGER.error(e, CarbonCommonConstants.FILTER_INVALID_MEMBER + e.getMessage());
+ LOGGER.error(CarbonCommonConstants.FILTER_INVALID_MEMBER + e.getMessage(), e);
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
index a5faacc..28c3f87 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
@@ -29,7 +29,6 @@ import java.util.BitSet;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -63,9 +62,11 @@ import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.DataTypeUtil;
+import org.apache.log4j.Logger;
+
public class RowLevelFilterExecuterImpl implements FilterExecuter {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(RowLevelFilterExecuterImpl.class.getName());
List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList;
List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java
index f1bbe15..4f934ce 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java
@@ -24,7 +24,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.metadata.encoder.Encoding;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
@@ -34,6 +33,8 @@ import org.apache.carbondata.core.scan.expression.Expression;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.util.DataTypeConverter;
+import org.apache.log4j.Logger;
+
public class QueryModelBuilder {
private CarbonTable table;
@@ -45,7 +46,7 @@ public class QueryModelBuilder {
/**
* log information
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(QueryModelBuilder.class.getName());
public QueryModelBuilder(CarbonTable table) {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java b/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java
index 0f9ba22..9191d08 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java
@@ -25,7 +25,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
@@ -45,12 +44,14 @@ import org.apache.carbondata.core.stats.QueryStatisticsConstants;
import org.apache.carbondata.core.stats.QueryStatisticsModel;
import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.log4j.Logger;
+
/**
* Scanned result class which will store and provide the result on request
*/
public abstract class BlockletScannedResult {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(BlockletScannedResult.class.getName());
/**
* current row number
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
index 5d430de..ed78aa7 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
@@ -23,7 +23,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.DataRefNode;
@@ -44,6 +43,8 @@ import org.apache.carbondata.core.stats.QueryStatisticsModel;
import org.apache.carbondata.core.stats.QueryStatisticsRecorder;
import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.log4j.Logger;
+
/**
* In case of detail query we cannot keep all the records in memory so for
* executing that query are returning a iterator over block and every time next
@@ -54,7 +55,7 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
/**
* LOGGER.
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(AbstractDetailQueryResultIterator.class.getName());
private static final Map<DeleteDeltaInfo, Object> deleteDeltaToLockObjectMap =
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
index efa5b8a..29d8751 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
@@ -17,13 +17,14 @@
package org.apache.carbondata.core.scan.result.iterator;
import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
import org.apache.carbondata.core.keygenerator.KeyGenException;
import org.apache.carbondata.core.scan.result.RowBatch;
import org.apache.carbondata.core.scan.wrappers.ByteArrayWrapper;
+import org.apache.log4j.Logger;
+
/**
* This is a wrapper iterator over the detail raw query iterator.
* This iterator will handle the processing of the raw rows.
@@ -49,7 +50,7 @@ public class RawResultIterator extends CarbonIterator<Object[]> {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(RawResultIterator.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImpl.java b/core/src/main/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImpl.java
index 1e30fe6..7dfc5dd 100644
--- a/core/src/main/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImpl.java
@@ -22,19 +22,20 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.StatisticLevel;
import static org.apache.carbondata.core.util.CarbonUtil.printLine;
import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
/**
* Class will be used to record and log the query statistics
*/
public class DriverQueryStatisticsRecorderImpl implements QueryStatisticsRecorder {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DriverQueryStatisticsRecorderImpl.class.getName());
/**
@@ -119,7 +120,7 @@ public class DriverQueryStatisticsRecorderImpl implements QueryStatisticsRecorde
if (entry.getValue().size() >= 2) {
String tableInfo = collectDriverStatistics(entry.getValue(), queryId);
if (null != tableInfo) {
- LOGGER.statistic(tableInfo);
+ LOGGER.log(StatisticLevel.STATISTIC, tableInfo);
// clear the statistics that has been printed
entries.remove();
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java b/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java
index 78ebd09..661bf93 100644
--- a/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/stats/QueryStatisticsRecorderImpl.java
@@ -20,15 +20,17 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.StatisticLevel;
+
+import org.apache.log4j.Logger;
/**
* Class will be used to record and log the query statistics
*/
public class QueryStatisticsRecorderImpl implements QueryStatisticsRecorder, Serializable {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(QueryStatisticsRecorderImpl.class.getName());
/**
@@ -67,7 +69,7 @@ public class QueryStatisticsRecorderImpl implements QueryStatisticsRecorder, Ser
*/
public void logStatistics() {
for (QueryStatistic statistic : queryStatistics) {
- LOGGER.statistic(statistic.getStatistics(queryId));
+ LOGGER.log(StatisticLevel.STATISTIC, statistic.getStatistics(queryId));
}
}
@@ -76,7 +78,8 @@ public class QueryStatisticsRecorderImpl implements QueryStatisticsRecorder, Ser
*/
public void logStatisticsForTask(TaskStatistics result) {
if (null != result) {
- LOGGER.statistic("Print query statistic for each task id:" + "\n" + result.toString());
+ LOGGER.log(StatisticLevel.STATISTIC,
+ "Print query statistic for each task id:" + "\n" + result.toString());
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
index 9dc8fe6..b19e774 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
@@ -22,10 +22,11 @@ import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.log4j.Logger;
+
/*
Prior to Carbon 1.3 the loadMetaData @timestamp and @loadStartTime was stored
as date string format "dd-MM-yyyy HH:mm:ss:SSS". The date string value is specific
@@ -95,7 +96,7 @@ public class LoadMetadataDetails implements Serializable {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(LoadMetadataDetails.class.getName());
// dont remove static as the write will fail.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
index f1ee877..9196367 100755
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
@@ -30,8 +30,8 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.common.logging.impl.Audit;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -56,13 +56,14 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
import com.google.gson.Gson;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
/**
* Manages Load/Segment status
*/
public class SegmentStatusManager {
- private static final LogService LOG =
+ private static final Logger LOG =
LogServiceFactory.getLogService(SegmentStatusManager.class.getName());
private AbsoluteTableIdentifier identifier;
@@ -252,7 +253,7 @@ public class SegmentStatusManager {
listOfLoadFolderDetailsArray =
gsonObjectToRead.fromJson(buffReader, LoadMetadataDetails[].class);
} catch (IOException e) {
- LOG.error(e, "Failed to read metadata of load");
+ LOG.error("Failed to read metadata of load", e);
throw e;
} finally {
closeStreams(buffReader, inStream, dataInputStream);
@@ -370,7 +371,7 @@ public class SegmentStatusManager {
String errorMsg = "Delete segment by id is failed for " + tableDetails
+ ". Not able to acquire the table status lock due to other operation running "
+ "in the background.";
- LOG.audit(errorMsg);
+ Audit.log(LOG, errorMsg);
LOG.error(errorMsg);
throw new Exception(errorMsg + " Please try after some time.");
}
@@ -380,7 +381,7 @@ public class SegmentStatusManager {
}
} else {
- LOG.audit("Delete segment by Id is failed. No matching segment id found.");
+ Audit.log(LOG, "Delete segment by Id is failed. No matching segment id found.");
return loadIds;
}
@@ -388,7 +389,7 @@ public class SegmentStatusManager {
String errorMsg = "Delete segment by id is failed for " + tableDetails
+ ". Not able to acquire the delete segment lock due to another delete "
+ "operation is running in the background.";
- LOG.audit(errorMsg);
+ Audit.log(LOG, errorMsg);
LOG.error(errorMsg);
throw new Exception(errorMsg + " Please try after some time.");
}
@@ -452,7 +453,7 @@ public class SegmentStatusManager {
String errorMsg = "Delete segment by date is failed for " + tableDetails
+ ". Not able to acquire the table status lock due to other operation running "
+ "in the background.";
- LOG.audit(errorMsg);
+ Audit.log(LOG, errorMsg);
LOG.error(errorMsg);
throw new Exception(errorMsg + " Please try after some time.");
@@ -462,7 +463,7 @@ public class SegmentStatusManager {
}
} else {
- LOG.audit("Delete segment by date is failed. No matching segment found.");
+ Audit.log(LOG, "Delete segment by date is failed. No matching segment found.");
invalidLoadTimestamps.add(loadDate);
return invalidLoadTimestamps;
}
@@ -471,7 +472,7 @@ public class SegmentStatusManager {
String errorMsg = "Delete segment by date is failed for " + tableDetails
+ ". Not able to acquire the delete segment lock due to another delete "
+ "operation is running in the background.";
- LOG.audit(errorMsg);
+ Audit.log(LOG, errorMsg);
LOG.error(errorMsg);
throw new Exception(errorMsg + " Please try after some time.");
}
@@ -578,7 +579,7 @@ public class SegmentStatusManager {
}
if (!loadFound) {
- LOG.audit("Delete segment by ID is failed. No matching segment id found :" + loadId);
+ Audit.log(LOG, "Delete segment by ID is failed. No matching segment id found :" + loadId);
invalidLoadIds.add(loadId);
return invalidLoadIds;
}
@@ -632,7 +633,7 @@ public class SegmentStatusManager {
if (!loadFound) {
invalidLoadTimestamps.add(loadDate);
- LOG.audit("Delete segment by date is failed. No matching segment found.");
+ Audit.log(LOG, "Delete segment by date is failed. No matching segment found.");
return invalidLoadTimestamps;
}
return invalidLoadTimestamps;
@@ -991,7 +992,7 @@ public class SegmentStatusManager {
dbName + "." + tableName +
". Not able to acquire the table status lock due to other operation " +
"running in the background.";
- LOG.audit(errorMsg);
+ Audit.log(LOG, errorMsg);
LOG.error(errorMsg);
throw new IOException(errorMsg + " Please try after some time.");
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
index c340714..c5f5f74 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
@@ -30,7 +30,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.Segment;
@@ -53,6 +52,7 @@ import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import com.google.gson.Gson;
+import org.apache.log4j.Logger;
/**
* Manages Segment & block status of carbon table for Delete operation
@@ -62,7 +62,7 @@ public class SegmentUpdateStatusManager {
/**
* logger
*/
- private static final LogService LOG =
+ private static final Logger LOG =
LogServiceFactory.getLogService(SegmentUpdateStatusManager.class.getName());
private final AbsoluteTableIdentifier identifier;
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/CarbonLoadStatisticsImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonLoadStatisticsImpl.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonLoadStatisticsImpl.java
index 6d6e3ed..618f4d1 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonLoadStatisticsImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonLoadStatisticsImpl.java
@@ -19,9 +19,10 @@ package org.apache.carbondata.core.util;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.log4j.Logger;
+
/**
* A util which provides methods used to record time information during data loading.
*/
@@ -37,7 +38,7 @@ public class CarbonLoadStatisticsImpl implements LoadStatistics {
return carbonLoadStatisticsImplInstance;
}
- private final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonLoadStatisticsImpl.class.getName());
/*
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
index 231f8c4..b156ae6 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastore.blocklet.BlockletEncodedColumnPage;
import org.apache.carbondata.core.datastore.blocklet.EncodedBlocklet;
@@ -51,12 +50,14 @@ import org.apache.carbondata.format.IndexHeader;
import org.apache.carbondata.format.LocalDictionaryChunk;
import org.apache.carbondata.format.SegmentInfo;
+import org.apache.log4j.Logger;
+
/**
* Util class to convert to thrift metadata classes
*/
public class CarbonMetadataUtil {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonMetadataUtil.class.getName());
private CarbonMetadataUtil() {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 3438c4e..a32ad52 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -29,13 +29,13 @@ import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
+
import static org.apache.carbondata.core.constants.CarbonCommonConstants.BLOCKLET_SIZE;
import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION;
import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_DATA_FILE_VERSION;
@@ -70,12 +70,13 @@ import static org.apache.carbondata.core.constants.CarbonV3DataFormatConstants.B
import static org.apache.carbondata.core.constants.CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO;
import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
public final class CarbonProperties {
/**
* Attribute for Carbon LOGGER.
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonProperties.class.getName());
/**
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 937c222..77ad96d 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -24,7 +24,6 @@ import java.nio.charset.Charset;
import java.security.PrivilegedExceptionAction;
import java.util.*;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
@@ -112,6 +111,7 @@ import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TCompactProtocol;
@@ -123,7 +123,7 @@ public final class CarbonUtil {
/**
* Attribute for Carbon LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(CarbonUtil.class.getName());
/**
@@ -164,7 +164,7 @@ public final class CarbonUtil {
try {
closeStream(stream);
} catch (IOException e) {
- LOGGER.error(e, "Error while closing stream:" + e);
+ LOGGER.error("Error while closing stream:" + e, e);
}
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index fbcbee5..66faf20 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -28,7 +28,6 @@ import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.page.ColumnPage;
@@ -42,12 +41,14 @@ import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.log4j.Logger;
+
public final class DataTypeUtil {
/**
* LOGGER
*/
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DataTypeUtil.class.getName());
private static final ThreadLocal<DateFormat> timeStampformatter = new ThreadLocal<DateFormat>() {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/06adb5a0/core/src/main/java/org/apache/carbondata/core/util/DeleteLoadFolders.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DeleteLoadFolders.java b/core/src/main/java/org/apache/carbondata/core/util/DeleteLoadFolders.java
index a65294e..f1cc57f 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DeleteLoadFolders.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DeleteLoadFolders.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datamap.DataMapStoreManager;
import org.apache.carbondata.core.datamap.Segment;
@@ -43,9 +42,11 @@ import org.apache.carbondata.core.statusmanager.SegmentStatusManager;
import org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager;
import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.log4j.Logger;
+
public final class DeleteLoadFolders {
- private static final LogService LOGGER =
+ private static final Logger LOGGER =
LogServiceFactory.getLogService(DeleteLoadFolders.class.getName());
private DeleteLoadFolders() {