You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ma...@apache.org on 2020/07/20 12:02:31 UTC
[carbondata] branch master updated: [CARBONDATA-3889] Cleanup typo
code for carbondata-core module
This is an automated email from the ASF dual-hosted git repository.
manhua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git
The following commit(s) were added to refs/heads/master by this push:
new 05dc1e6 [CARBONDATA-3889] Cleanup typo code for carbondata-core module
05dc1e6 is described below
commit 05dc1e63ba7ce1f3abd54e7a2f91c4a94195b6b9
Author: QiangCai <qi...@qq.com>
AuthorDate: Tue Jul 7 20:13:11 2020 +0800
[CARBONDATA-3889] Cleanup typo code for carbondata-core module
Why is this PR needed?
There are many typos in carbondata-core module
What changes were proposed in this PR?
Cleanup typo code for carbondata-core module
Does this PR introduce any user interface change?
No
Is any new testcase added?
No
This closes #3828
---
core/pom.xml | 2 +-
.../apache/carbondata/core/cache/Cacheable.java | 2 +-
.../carbondata/core/cache/CarbonLRUCache.java | 6 +-
.../core/constants/CarbonLoadOptionConstants.java | 2 +-
.../constants/CarbonV3DataFormatConstants.java | 2 +-
.../carbondata/core/datastore/FileReader.java | 8 +-
.../core/datastore/ReusableDataBuffer.java | 2 +-
.../carbondata/core/datastore/TableSpec.java | 16 +-
.../core/datastore/block/AbstractIndex.java | 3 +-
.../core/datastore/block/SegmentProperties.java | 96 +++++------
.../block/SegmentPropertiesAndSchemaHolder.java | 2 +-
.../core/datastore/block/TableBlockInfo.java | 2 +-
.../core/datastore/chunk/DimensionColumnPage.java | 4 +-
.../chunk/impl/AbstractDimensionColumnPage.java | 2 +-
.../impl/VariableLengthDimensionColumnPage.java | 5 +-
.../chunk/reader/CarbonDataReaderFactory.java | 8 +-
.../dimension/AbstractDimensionChunkReader.java | 2 +-
.../dimension/v3/DimensionChunkPageReaderV3.java | 2 +-
.../dimension/v3/DimensionChunkReaderV3.java | 4 +-
.../measure/v3/MeasureChunkPageReaderV3.java | 2 +-
.../reader/measure/v3/MeasureChunkReaderV3.java | 2 +-
.../datastore/chunk/store/ColumnPageWrapper.java | 2 +-
.../chunk/store/DimensionDataChunkStore.java | 4 +-
.../impl/LocalDictDimensionDataChunkStore.java | 8 +-
...va => SafeAbstractDimensionDataChunkStore.java} | 12 +-
.../SafeFixedLengthDimensionDataChunkStore.java | 20 +--
...feVariableIntLengthDimensionDataChunkStore.java | 2 +-
.../SafeVariableLengthDimensionDataChunkStore.java | 20 +--
...VariableShortLengthDimensionDataChunkStore.java | 2 +-
.../UnsafeAbstractDimensionDataChunkStore.java | 10 +-
.../UnsafeFixedLengthDimensionDataChunkStore.java | 14 +-
...feVariableIntLengthDimensionDataChunkStore.java | 6 +-
...nsafeVariableLengthDimensionDataChunkStore.java | 14 +-
...VariableShortLengthDimensionDataChunkStore.java | 6 +-
.../columnar/ByteArrayBlockIndexerStorage.java | 12 +-
.../columnar/ObjectArrayBlockIndexerStorage.java | 8 +-
.../datastore/compression/CompressorFactory.java | 6 +-
.../core/datastore/compression/GzipCompressor.java | 4 +-
.../filesystem/AbstractDFSCarbonFile.java | 18 +--
.../datastore/filesystem/AlluxioCarbonFile.java | 4 +-
.../core/datastore/filesystem/HDFSCarbonFile.java | 10 +-
.../core/datastore/filesystem/LocalCarbonFile.java | 12 +-
.../core/datastore/filesystem/S3CarbonFile.java | 8 +-
.../datastore/filesystem/ViewFSCarbonFile.java | 2 +-
.../core/datastore/impl/FileFactory.java | 6 +-
.../core/datastore/impl/FileReaderImpl.java | 8 +-
.../core/datastore/impl/FileTypeInterface.java | 2 +-
.../core/datastore/page/ComplexColumnPage.java | 6 +-
.../page/DecoderBasedFallbackEncoder.java | 2 +-
.../core/datastore/page/EncodedTablePage.java | 2 +-
.../core/datastore/page/LocalDictColumnPage.java | 8 +-
.../datastore/page/SafeFixLengthColumnPage.java | 22 +--
.../datastore/page/encoding/ColumnPageEncoder.java | 10 +-
.../page/encoding/ColumnPageEncoderMeta.java | 6 +-
.../adaptive/AdaptiveDeltaIntegralCodec.java | 6 +-
.../encoding/adaptive/AdaptiveIntegralCodec.java | 6 +-
.../encoding/compress/DirectCompressCodec.java | 6 +-
.../carbondata/core/enums/EscapeSequences.java | 2 +-
.../apache/carbondata/core/index/IndexChooser.java | 2 +-
.../apache/carbondata/core/index/IndexFilter.java | 4 +-
.../carbondata/core/index/IndexInputFormat.java | 24 +--
.../org/apache/carbondata/core/index/IndexJob.java | 2 +-
.../carbondata/core/index/IndexStoreManager.java | 6 +-
.../apache/carbondata/core/index/TableIndex.java | 125 +++++++--------
.../carbondata/core/index/dev/CacheableIndex.java | 8 +-
.../apache/carbondata/core/index/dev/Index.java | 12 +-
.../carbondata/core/index/dev/IndexFactory.java | 2 +-
.../core/index/dev/cgindex/CoarseGrainIndex.java | 4 +-
.../index/dev/cgindex/CoarseGrainIndexFactory.java | 4 +-
.../core/index/dev/expr/AndIndexExprWrapper.java | 2 +-
.../core/index/dev/expr/IndexExprWrapper.java | 6 +-
.../core/index/dev/expr/IndexExprWrapperImpl.java | 2 +-
.../index/dev/expr/IndexWrapperSimpleInfo.java | 4 +-
.../core/index/dev/expr/OrIndexExprWrapper.java | 2 +-
.../core/index/dev/fgindex/FineGrainIndex.java | 4 +-
.../index/dev/fgindex/FineGrainIndexFactory.java | 12 +-
.../core/indexstore/BlockletDetailsFetcher.java | 4 +-
.../core/indexstore/BlockletIndexStore.java | 14 +-
.../core/indexstore/ExtendedBlocklet.java | 8 +-
.../core/indexstore/ExtendedBlockletWrapper.java | 4 +-
.../ExtendedBlockletWrapperContainer.java | 2 +-
.../TableBlockIndexUniqueIdentifier.java | 2 +-
.../core/indexstore/UnsafeMemoryDMStore.java | 8 +-
.../core/indexstore/blockletindex/BlockIndex.java | 46 +++---
.../blockletindex/BlockletIndexFactory.java | 40 ++---
.../blockletindex/BlockletIndexRowIndexes.java | 4 +-
.../blockletindex/SegmentIndexFileStore.java | 12 +-
.../core/indexstore/row/IndexRowImpl.java | 2 +-
.../core/indexstore/schema/SchemaGenerator.java | 2 +-
.../timestamp/DateDirectDictionaryGenerator.java | 2 +-
.../TimeStampDirectDictionaryGenerator.java | 2 +-
.../timestamp/TimeStampGranularityConstants.java | 2 +-
.../carbondata/core/keygenerator/mdkey/Bits.java | 26 +--
.../dictionaryholder/MapBasedDictionaryStore.java | 8 +-
.../carbondata/core/locks/AlluxioFileLock.java | 2 +-
.../carbondata/core/locks/CarbonLockFactory.java | 6 +-
.../carbondata/core/locks/CarbonLockUtil.java | 24 +--
.../apache/carbondata/core/locks/HdfsFileLock.java | 2 +-
.../apache/carbondata/core/locks/ICarbonLock.java | 2 +-
.../carbondata/core/locks/LocalFileLock.java | 2 +-
.../apache/carbondata/core/locks/LockUsage.java | 4 +-
.../apache/carbondata/core/locks/S3FileLock.java | 2 +-
.../carbondata/core/locks/ZooKeeperLocking.java | 10 +-
.../carbondata/core/memory/CarbonUnsafe.java | 2 +-
.../apache/carbondata/core/memory/MemoryBlock.java | 2 +-
.../core/memory/UnsafeMemoryManager.java | 40 ++---
.../core/memory/UnsafeSortMemoryManager.java | 12 +-
.../carbondata/core/metadata/CarbonMetadata.java | 10 +-
.../core/metadata/CarbonTableIdentifier.java | 2 +-
.../carbondata/core/metadata/SegmentFileStore.java | 44 ++---
.../core/metadata/blocklet/BlockletInfo.java | 16 +-
.../ThriftWrapperSchemaConverterImpl.java | 10 +-
.../core/metadata/schema/SchemaEvolutionEntry.java | 4 +-
.../metadata/schema/indextable/IndexTableInfo.java | 6 +-
.../core/metadata/schema/table/CarbonTable.java | 30 ++--
.../core/metadata/schema/table/TableInfo.java | 4 +-
.../metadata/schema/table/TableSchemaBuilder.java | 13 +-
.../metadata/schema/table/column/CarbonColumn.java | 4 +-
.../schema/table/column/CarbonDimension.java | 4 +-
.../table/column/CarbonImplicitDimension.java | 4 +-
.../metadata/schema/table/column/ColumnSchema.java | 4 +-
.../table/column/ParentColumnTableRelation.java | 4 +-
.../carbondata/core/mutate/CarbonUpdateUtil.java | 42 ++---
.../core/mutate/SegmentUpdateDetails.java | 8 +-
.../core/mutate/data/BlockMappingVO.java | 2 +-
.../core/preagg/TimeSeriesFunctionEnum.java | 2 +-
.../carbondata/core/preagg/TimeSeriesUDF.java | 14 +-
.../carbondata/core/profiler/ExplainCollector.java | 2 +-
.../TableStatusReadCommittedScope.java | 2 +-
.../core/reader/CarbonDeleteFilesDataReader.java | 4 +-
.../scan/collector/ResultCollectorFactory.java | 2 +-
.../impl/DictionaryBasedResultCollector.java | 8 +-
.../impl/DictionaryBasedVectorResultCollector.java | 8 +-
.../collector/impl/RawBasedResultCollector.java | 4 +-
.../RestructureBasedDictionaryResultCollector.java | 6 +-
.../RestructureBasedVectorResultCollector.java | 2 +-
.../collector/impl/RowIdBasedResultCollector.java | 2 +-
.../impl/RowIdRawBasedResultCollector.java | 8 +-
.../RowIdRestructureBasedRawResultCollector.java | 4 +-
.../scan/executor/impl/AbstractQueryExecutor.java | 28 ++--
.../scan/executor/infos/BlockExecutionInfo.java | 16 +-
.../core/scan/executor/util/QueryUtil.java | 10 +-
.../core/scan/executor/util/RestructureUtil.java | 8 +-
.../core/scan/expression/Expression.java | 2 +-
.../scan/expression/RangeExpressionEvaluator.java | 10 +-
.../core/scan/expression/UnknownExpression.java | 4 +-
.../conditional/ConditionalExpression.java | 2 +-
.../expression/conditional/EqualToExpression.java | 2 +-
.../scan/expression/conditional/InExpression.java | 18 +--
.../conditional/NotEqualsExpression.java | 2 +-
.../expression/conditional/NotInExpression.java | 24 +--
.../logical/BinaryLogicalExpression.java | 2 +-
.../core/scan/filter/FilterExecutorUtil.java | 10 +-
.../scan/filter/FilterExpressionProcessor.java | 22 +--
.../core/scan/filter/FilterProcessor.java | 2 +-
.../carbondata/core/scan/filter/FilterUtil.java | 178 ++++++++++-----------
...xecuterImpl.java => AndFilterExecutorImpl.java} | 56 +++----
.../scan/filter/executer/BitSetUpdaterFactory.java | 8 +-
...rInfo.java => DimColumnExecutorFilterInfo.java} | 2 +-
...terImpl.java => ExcludeFilterExecutorImpl.java} | 40 ++---
.../scan/filter/executer/FalseFilterExecutor.java | 2 +-
.../{FilterExecuter.java => FilterExecutor.java} | 2 +-
.../ImplicitIncludeFilterExecutorImpl.java | 4 +-
...terImpl.java => IncludeFilterExecutorImpl.java} | 66 ++++----
...o.java => MeasureColumnExecutorFilterInfo.java} | 4 +-
...ExecuterImpl.java => OrFilterExecutorImpl.java} | 32 ++--
...Impl.java => RangeValueFilterExecutorImpl.java} | 78 ++++-----
.../filter/executer/RestructureEvaluatorImpl.java | 2 +-
...erImpl.java => RowLevelFilterExecutorImpl.java} | 24 +--
...elRangeGreaterThanEqualFilterExecutorImpl.java} | 72 ++++-----
...owLevelRangeGreaterThanFilterExecutorImpl.java} | 82 +++++-----
...LevelRangeLessThanEqualFilterExecutorImpl.java} | 72 ++++-----
...> RowLevelRangeLessThanFilterExecutorImpl.java} | 78 ++++-----
....java => RowLevelRangeTypeExecutorFactory.java} | 30 ++--
.../scan/filter/executer/TrueFilterExecutor.java | 2 +-
...erExecuterType.java => FilterExecutorType.java} | 2 +-
...lterOptmizer.java => RangeFilterOptimizer.java} | 4 +-
.../resolver/ConditionalFilterResolverImpl.java | 24 +--
.../scan/filter/resolver/FilterResolverIntf.java | 6 +-
.../filter/resolver/LogicalFilterResolverImpl.java | 26 +--
.../resolver/RowLevelFilterResolverImpl.java | 12 +-
.../resolver/RowLevelRangeFilterResolverImpl.java | 32 ++--
.../resolverinfo/DimColumnResolvedFilterInfo.java | 26 +--
.../resolverinfo/FalseConditionalResolverImpl.java | 12 +-
.../MeasureColumnResolvedFilterInfo.java | 22 +--
.../resolverinfo/TrueConditionalResolverImpl.java | 8 +-
.../visitor/CustomTypeDictionaryVisitor.java | 2 +-
.../visitor/ResolvedFilterInfoVisitorIntf.java | 2 +-
.../carbondata/core/scan/model/QueryModel.java | 4 +-
.../core/scan/model/QueryModelBuilder.java | 14 +-
.../core/scan/processor/BlockletIterator.java | 20 +--
.../core/scan/processor/DataBlockIterator.java | 2 +-
.../core/scan/result/BlockletScannedResult.java | 36 ++---
.../AbstractDetailQueryResultIterator.java | 8 +-
...ava => PartitionSplitterRawResultIterator.java} | 4 +-
.../scan/result/iterator/RawResultIterator.java | 2 +-
.../scan/result/vector/CarbonColumnarBatch.java | 2 +-
.../directread/AbstractCarbonColumnarVector.java | 2 +-
.../ColumnarVectorWrapperDirectFactory.java | 2 +-
...ColumnarVectorWrapperDirectWithDeleteDelta.java | 2 +-
...apperDirectWithDeleteDeltaAndInvertedIndex.java | 6 +-
...nvertableVector.java => ConvertibleVector.java} | 2 +-
.../vector/impl/directread/SequentialFill.java | 2 +-
.../scan/scanner/impl/BlockletFilterScanner.java | 22 +--
.../carbondata/core/stats/QueryStatistic.java | 10 +-
.../core/statusmanager/LoadMetadataDetails.java | 18 +--
.../core/statusmanager/SegmentStatusManager.java | 44 ++---
.../statusmanager/SegmentUpdateStatusManager.java | 30 ++--
.../core/statusmanager/StageInputCollector.java | 6 +-
.../carbondata/core/stream/StreamPruner.java | 10 +-
.../core/util/AbstractDataFileFooterConverter.java | 4 +-
.../carbondata/core/util/BlockletIndexUtil.java | 4 +-
.../org/apache/carbondata/core/util/ByteUtil.java | 28 ++--
.../core/util/CarbonLoadStatisticsDummy.java | 2 +-
.../core/util/CarbonLoadStatisticsImpl.java | 32 ++--
.../carbondata/core/util/CarbonMetadataUtil.java | 22 +--
.../carbondata/core/util/CarbonProperties.java | 26 +--
.../apache/carbondata/core/util/CarbonUtil.java | 162 +++++++++----------
.../apache/carbondata/core/util/CustomIndex.java | 2 +-
.../apache/carbondata/core/util/DataTypeUtil.java | 52 +++---
.../carbondata/core/util/DeleteLoadFolders.java | 4 +-
.../carbondata/core/util/LoadStatistics.java | 2 +-
.../carbondata/core/util/TaskMetricsMap.java | 12 +-
.../carbondata/core/util/path/CarbonTablePath.java | 20 +--
.../org/apache/carbondata/core/view/MVCatalog.java | 2 +-
.../org/apache/carbondata/core/view/MVManager.java | 8 +-
.../org/apache/carbondata/core/view/MVSchema.java | 4 +-
.../core/writer/CarbonIndexFileMergeWriter.java | 22 +--
.../apache/carbondata/hadoop/CarbonInputSplit.java | 42 ++---
.../blockletindex/TestBlockletIndex.java | 6 +-
.../blockletindex/TestBlockletIndexFactory.java | 3 +-
...est.java => ExcludeFilterExecutorImplTest.java} | 2 +-
...est.java => IncludeFilterExecutorImplTest.java} | 6 +-
.../core/util/RangeFilterProcessorTest.java | 24 +--
.../geo/scan/expression/PolygonExpression.java | 4 +-
.../filter/executor/PolygonFilterExecutorImpl.java | 4 +-
.../hadoop/stream/StreamRecordReader.java | 6 +-
.../index/bloom/BloomCoarseGrainIndex.java | 4 +-
.../index/lucene/LuceneFineGrainIndex.java | 4 +-
.../apache/carbon/core/metadata/StageManager.java | 2 +-
.../carbon/flink/TestCarbonPartitionWriter.scala | 2 +-
.../org/apache/carbondata/api/CarbonStore.scala | 10 +-
.../apache/carbondata/indexserver/IndexJobs.scala | 6 +-
.../spark/rdd/CarbonDataRDDFactory.scala | 2 +-
.../management/CarbonDeleteStageFilesCommand.scala | 2 +-
.../management/CarbonInsertFromStageCommand.scala | 12 +-
.../command/table/CarbonDropTableCommand.scala | 2 +-
.../command/table/CarbonExplainCommand.scala | 4 +-
.../apache/spark/sql/index/CarbonIndexUtil.scala | 4 +-
.../Jobs/BlockletIndexInputFormat.java | 2 +-
.../load/CarbonInternalLoaderUtil.java | 2 +-
.../spark/testsuite/index/CGIndexTestCase.scala | 4 +-
.../spark/testsuite/index/FGIndexTestCase.scala | 4 +-
pom.xml | 4 +-
.../processing/merger/CarbonDataMergerUtil.java | 2 +-
255 files changed, 1633 insertions(+), 1638 deletions(-)
diff --git a/core/pom.xml b/core/pom.xml
index 5cf11a7..bf9a90f 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -147,7 +147,7 @@
<phase>generate-resources</phase>
<!-- Execute the shell script to generate the CarbonData build information. -->
<configuration>
- <executable>${project.basedir}/../build/carbondata-build-info${script.exetension}</executable>
+ <executable>${project.basedir}/../build/carbondata-build-info${script.extension}</executable>
<arguments>
<argument>${project.build.directory}/extra-resources</argument>
<argument>${project.version}</argument>
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/Cacheable.java b/core/src/main/java/org/apache/carbondata/core/cache/Cacheable.java
index e348890..2efa3ad 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/Cacheable.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/Cacheable.java
@@ -40,7 +40,7 @@ public interface Cacheable {
/**
* Method to be used for invalidating the cacheable object. API to be invoked at the time of
- * removing the cacheable object from memory. Example at the of removing the cachebale object
+ * removing the cacheable object from memory. Example at removing the cacheable object
* from LRU cache
*/
void invalidate();
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java b/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
index 3b19425..0759798 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/CarbonLRUCache.java
@@ -282,7 +282,7 @@ public final class CarbonLRUCache {
}
/**
- * This method will check if size is available to laod dictionary into memory
+ * This method will check if size is available to load dictionary into memory
*
* @param requiredSize
* @return
@@ -306,8 +306,8 @@ public final class CarbonLRUCache {
*/
public void clear() {
synchronized (expiringMap) {
- for (Cacheable cachebleObj : expiringMap.values()) {
- cachebleObj.invalidate();
+ for (Cacheable cacheable : expiringMap.values()) {
+ cacheable.invalidate();
}
expiringMap.clear();
}
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonLoadOptionConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonLoadOptionConstants.java
index 3a8aa3d..5950611 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonLoadOptionConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonLoadOptionConstants.java
@@ -144,7 +144,7 @@ public final class CarbonLoadOptionConstants {
public static final String ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH_DEFAULT = "false";
/**
- * If the sort memory is insufficient, spill inmemory pages to disk.
+ * If the sort memory is insufficient, spill in-memory pages to disk.
* The total amount of pages is at most the specified percentage of total sort memory. Default
* value 0 means that no pages will be spilled and the newly incoming pages will be spilled,
* whereas value 100 means that all pages will be spilled and newly incoming pages will be loaded
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonV3DataFormatConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonV3DataFormatConstants.java
index 1dbf470..a170b8a 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonV3DataFormatConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonV3DataFormatConstants.java
@@ -49,7 +49,7 @@ public interface CarbonV3DataFormatConstants {
/**
* number of column to be read in one IO in query default value
*/
- String NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE = "10";
+ String NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULT_VALUE = "10";
/**
* number of column to be read in one IO in query max value
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java b/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java
index df249dd..36fa20f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java
@@ -56,7 +56,7 @@ public interface FileReader {
byte[] readByteArray(String filePath, int length) throws IOException;
/**
- * This method will be used to read int from file from postion(offset), here
+ * This method will be used to read int from file from position(offset), here
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
@@ -66,7 +66,7 @@ public interface FileReader {
int readInt(String filePath, long offset) throws IOException;
/**
- * This method will be used to read long from file from postion(offset), here
+ * This method will be used to read long from file from position(offset), here
* length will be always 8 because int byte size is 8
*
* @param filePath fully qualified file path
@@ -76,7 +76,7 @@ public interface FileReader {
long readLong(String filePath, long offset) throws IOException;
/**
- * This method will be used to read int from file from postion(offset), here
+ * This method will be used to read int from file from position(offset), here
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
@@ -85,7 +85,7 @@ public interface FileReader {
int readInt(String filePath) throws IOException;
/**
- * This method will be used to read long value from file from postion(offset), here
+ * This method will be used to read long value from file from position(offset), here
* length will be always 8 because long byte size if 4
*
* @param filePath fully qualified file path
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/ReusableDataBuffer.java b/core/src/main/java/org/apache/carbondata/core/datastore/ReusableDataBuffer.java
index d0add0c..63372fc 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/ReusableDataBuffer.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/ReusableDataBuffer.java
@@ -44,7 +44,7 @@ public class ReusableDataBuffer {
* If requested size is less it will return same buffer, if size is more
* it resize the buffer and return
* @param requestedSize
- * @return databuffer
+ * @return dataBuffer
*/
public byte[] getDataBuffer(int requestedSize) {
if (dataBuffer == null || requestedSize > size) {
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
index ae6507c..fe98e96 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
@@ -159,10 +159,10 @@ public class TableSpec {
this.dictDimActualPosition = new int[dictSortDimSpec.size()];
this.noDictDimActualPosition = new int[noDictSortDimSpec.size()];
for (int i = 0; i < dictDimActualPosition.length; i++) {
- dictDimActualPosition[i] = dictSortDimSpec.get(i).getActualPostion();
+ dictDimActualPosition[i] = dictSortDimSpec.get(i).getActualPosition();
}
for (int i = 0; i < noDictDimActualPosition.length; i++) {
- noDictDimActualPosition[i] = noDictSortDimSpec.get(i).getActualPostion();
+ noDictDimActualPosition[i] = noDictSortDimSpec.get(i).getActualPosition();
}
isUpdateNoDictDims = !noDictSortDimSpec.equals(noDictionaryDimensionSpec);
isUpdateDictDim = !dictSortDimSpec.equals(dictDimensionSpec);
@@ -342,13 +342,13 @@ public class TableSpec {
// indicate whether this dimension need to do inverted index
private boolean doInvertedIndex;
- // indicate the actual postion in blocklet
- private short actualPostion;
- DimensionSpec(ColumnType columnType, CarbonDimension dimension, short actualPostion) {
+ // indicate the actual position in blocklet
+ private short actualPosition;
+ DimensionSpec(ColumnType columnType, CarbonDimension dimension, short actualPosition) {
super(dimension.getColName(), dimension.getDataType(), columnType);
this.inSortColumns = dimension.isSortColumn();
this.doInvertedIndex = dimension.isUseInvertedIndex();
- this.actualPostion = actualPostion;
+ this.actualPosition = actualPosition;
}
public boolean isInSortColumns() {
@@ -359,8 +359,8 @@ public class TableSpec {
return doInvertedIndex;
}
- public short getActualPostion() {
- return actualPostion;
+ public short getActualPosition() {
+ return actualPosition;
}
@Override
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java
index c93e162..3f9d310 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/AbstractIndex.java
@@ -52,8 +52,7 @@ public abstract class AbstractIndex implements Cacheable {
private long deleteDeltaTimestamp;
/**
- * map of blockletidAndPageId to
- * deleted rows
+ * map of blockletIdAndPageId to deleted rows
*/
private Map<String, DeleteDeltaVo> deletedRowsMap;
/**
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
index 25540a7..1d291d2 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
@@ -50,19 +50,19 @@ public class SegmentProperties {
private static final Logger LOG =
LogServiceFactory.getLogService(SegmentProperties.class.getName());
- // When calcuting the fingerpinter of all columns. In order to
- // identify dimension columns with other column. The fingerprinter
- // of dimensions will leftshift 1 bit
+ // When calculating the finger printer of all columns. In order to
+ // identify dimension columns with other column. The finger printer
+ // of dimensions will left shift 1 bit
private static final int DIMENSIONS_FINGER_PRINTER_SHIFT = 1;
- // When calcuting the fingerpinter of all columns. In order to
- // identify measure columns with other column. The fingerprinter
- // of measures will leftshift 2 bit
+ // When calculating the finger printer of all columns. In order to
+ // identify measure columns with other column. The finger printer
+ // of measures will left shift 2 bit
private static final int MEASURES_FINGER_PRINTER_SHIFT = 2;
- // When calcuting the fingerpinter of all columns. In order to
- // identify complex columns with other column. The fingerprinter
- // of complex columns will leftshift 3 bit
+ // When calculating the finger printer of all columns. In order to
+ // identify complex columns with other column. The finger printer
+ // of complex columns will left shift 3 bit
private static final int COMPLEX_FINGER_PRINTER_SHIFT = 3;
/**
@@ -89,7 +89,7 @@ public class SegmentProperties {
* a block can have multiple columns. This will have block index as key
* and all dimension participated in that block as values
*/
- private Map<Integer, Set<Integer>> blockTodimensionOrdinalMapping;
+ private Map<Integer, Set<Integer>> blockToDimensionOrdinalMapping;
/**
* mapping of measure ordinal in schema to column chunk index in the data file
@@ -113,23 +113,23 @@ public class SegmentProperties {
private int lastDimensionColOrdinal;
/**
- * The fingerprinter is the xor result of all the columns in table.
- * Besides, in the case of two segmentproperties have same columns
- * but different sortcolumn, n like there is a column exists in both
- * segmentproperties, but is dimension in one segmentproperties,
+ * The finger printer is the xor result of all the columns in table.
+ * Besides, in the case of two segment properties have same columns
+ * but different sort columns, e.g. there is a column that exists in both
+ * segment properties, but is dimension in one segment properties,
* but is a measure in the other. In order to identify the difference
- * of these two segmentproperties. The xor result of all dimension
- * will leftshift 1 bit, the xor results of all measures will leftshift
- * 2bit, and the xor results of all complex columns will leftshift 3 bits
- * Sum up, the Formula of generate fingerprinter is
+ * of these two segment properties. The xor result of all dimension
+ * will left shift 1 bit, the xor results of all measures will left shift
+ * 2bit, and the xor results of all complex columns will left shift 3 bits
+ * Sum up, the Formula of generate finger printer is
*
- * fingerprinter = (dimensionfingerprinter >> 1)
- * ^ (measurefingerprinter >> 1) ^ (complexfingerprinter >> 1)
- * dimensionsfingerprinter = dimension1 ^ dimension2 ^ ...
- * measuresfingerprinter = measure1 ^ measure2 ^ measure3 ...
- * complexfingerprinter = complex1 ^ complex2 ^ complex3 ...
+ * fingerPrinter = (dimensionFingerPrinter >> 1)
+ * ^ (measureFingerPrinter >> 1) ^ (complexFingerPrinter >> 1)
+ * dimensionsFingerPrinter = dimension1 ^ dimension2 ^ ...
+ * measuresFingerPrinter = measure1 ^ measure2 ^ measure3 ...
+ * complexFingerPrinter = complex1 ^ complex2 ^ complex3 ...
*/
- private long fingerprinter = Long.MAX_VALUE;
+ private long fingerPrinter = Long.MAX_VALUE;
public SegmentProperties(List<ColumnSchema> columnsInTable) {
dimensions = new ArrayList<CarbonDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
@@ -139,7 +139,7 @@ public class SegmentProperties {
fillDimensionAndMeasureDetails(columnsInTable);
dimensionOrdinalToChunkMapping =
new HashMap<Integer, Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
- blockTodimensionOrdinalMapping =
+ blockToDimensionOrdinalMapping =
new HashMap<Integer, Set<Integer>>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
measuresOrdinalToChunkMapping =
new HashMap<Integer, Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
@@ -177,33 +177,33 @@ public class SegmentProperties {
Iterator<Entry<Integer, Integer>> blockItr = blocks.iterator();
while (blockItr.hasNext()) {
Entry<Integer, Integer> block = blockItr.next();
- Set<Integer> dimensionOrdinals = blockTodimensionOrdinalMapping.get(block.getValue());
+ Set<Integer> dimensionOrdinals = blockToDimensionOrdinalMapping.get(block.getValue());
if (dimensionOrdinals == null) {
dimensionOrdinals = new HashSet<Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
- blockTodimensionOrdinalMapping.put(block.getValue(), dimensionOrdinals);
+ blockToDimensionOrdinalMapping.put(block.getValue(), dimensionOrdinals);
}
dimensionOrdinals.add(block.getKey());
}
}
/**
- * compare the segmentproperties based on fingerprinter
+ * compare the segment properties based on finger printer
*/
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SegmentProperties)) {
return false;
}
- // If these two segmentproperties have different number of columns
+ // If these two segment properties have different number of columns
// Return false directly
SegmentProperties segmentProperties = (SegmentProperties) obj;
if (this.getNumberOfColumns() != segmentProperties.getNumberOfColumns()) {
return false;
}
- // Compare the fingerprinter
- return getFingerprinter() != Long.MIN_VALUE &&
- segmentProperties.getFingerprinter() != Long.MIN_VALUE &&
- (getFingerprinter() == segmentProperties.getFingerprinter());
+ // Compare the finger printer
+ return getFingerPrinter() != Long.MIN_VALUE &&
+ segmentProperties.getFingerPrinter() != Long.MIN_VALUE &&
+ (getFingerPrinter() == segmentProperties.getFingerPrinter());
}
@Override
@@ -248,25 +248,25 @@ public class SegmentProperties {
}
/**
- * fingerprinter = (dimensionfingerprinter >> 1)
- * ^ (measurefingerprinter >> 1) ^ (complexfingerprinter >> 1)
- * dimensionsfingerprinter = dimension1 ^ dimension2 ^ ...
- * measuresfingerprinter = measure1 ^ measure2 ^ measure3 ...
- * complexfingerprinter = complex1 ^ complex2 ^ complex3 ...
+ * fingerPrinter = (dimensionFingerPrinter >> 1)
+ * ^ (measureFingerPrinter >> 1) ^ (complexFingerPrinter >> 1)
+ * dimensionsFingerPrinter = dimension1 ^ dimension2 ^ ...
+ * measuresFingerPrinter = measure1 ^ measure2 ^ measure3 ...
+ * complexFingerPrinter = complex1 ^ complex2 ^ complex3 ...
*/
- protected long getFingerprinter() {
- if (this.fingerprinter == Long.MAX_VALUE) {
+ protected long getFingerPrinter() {
+ if (this.fingerPrinter == Long.MAX_VALUE) {
long dimensionsFingerPrinter = getFingerprinter(this.dimensions.stream()
.map(t -> t.getColumnSchema()).collect(Collectors.toList()));
long measuresFingerPrinter = getFingerprinter(this.measures.stream()
.map(t -> t.getColumnSchema()).collect(Collectors.toList()));
long complexFingerPrinter = getFingerprinter(this.complexDimensions.stream()
.map(t -> t.getColumnSchema()).collect(Collectors.toList()));
- this.fingerprinter = (dimensionsFingerPrinter >> DIMENSIONS_FINGER_PRINTER_SHIFT)
+ this.fingerPrinter = (dimensionsFingerPrinter >> DIMENSIONS_FINGER_PRINTER_SHIFT)
^ (measuresFingerPrinter >> MEASURES_FINGER_PRINTER_SHIFT)
^ (complexFingerPrinter >> COMPLEX_FINGER_PRINTER_SHIFT);
}
- return this.fingerprinter;
+ return this.fingerPrinter;
}
private long getFingerprinter(List<ColumnSchema> columns) {
@@ -312,7 +312,7 @@ public class SegmentProperties {
columnSchema = columnsInTable.get(counter);
if (columnSchema.isDimensionColumn()) {
// not adding the cardinality of the non dictionary
- // column as it was not the part of mdkey
+ // column as it was not the part of MDKey
if (CarbonUtil.hasEncoding(columnSchema.getEncodingList(), Encoding.DICTIONARY)
&& !isComplexDimensionStarted && columnSchema.getNumberOfChild() == 0) {
this.numberOfDictDimensions++;
@@ -320,7 +320,7 @@ public class SegmentProperties {
if (columnSchema.isSortColumn()) {
this.numberOfSortColumns++;
}
- // if it is a columnar dimension participated in mdkey then added
+ // if it is a columnar dimension participated in MDKey then added
// key ordinal and dimension ordinal
carbonDimension =
new CarbonDimension(columnSchema, dimensionOrdinal++, keyOrdinal++, -1);
@@ -400,11 +400,11 @@ public class SegmentProperties {
for (int i = 0; i < parentDimension.getNumberOfChild(); i++) {
CarbonDimension dimension = parentDimension.getListOfChildDimensions().get(i);
if (dimension.getNumberOfChild() > 0) {
- dimension.setComplexTypeOridnal(++complexDimensionOrdinal);
+ dimension.setComplexTypeOrdinal(++complexDimensionOrdinal);
complexDimensionOrdinal = assignComplexOrdinal(dimension, complexDimensionOrdinal);
} else {
parentDimension.getListOfChildDimensions().get(i)
- .setComplexTypeOridnal(++complexDimensionOrdinal);
+ .setComplexTypeOrdinal(++complexDimensionOrdinal);
}
}
return complexDimensionOrdinal;
@@ -455,8 +455,8 @@ public class SegmentProperties {
/**
* @return It returns block index to dimension ordinal mapping
*/
- public Map<Integer, Set<Integer>> getBlockTodimensionOrdinalMapping() {
- return blockTodimensionOrdinalMapping;
+ public Map<Integer, Set<Integer>> getBlockToDimensionOrdinalMapping() {
+ return blockToDimensionOrdinalMapping;
}
/**
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java
index e0c8c6e..032eb0f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentPropertiesAndSchemaHolder.java
@@ -492,7 +492,7 @@ public class SegmentPropertiesAndSchemaHolder {
*/
private Set<String> segmentIdSet;
/**
- * index which maps to segmentPropertiesWrpper Index from where segmentProperties
+ * index which maps to segmentPropertiesWrapper Index from where segmentProperties
* can be retrieved
*/
private int segmentPropertiesIndex;
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java
index 0edab5f..168bbf0 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java
@@ -233,7 +233,7 @@ public class TableBlockInfo implements Distributable, Serializable {
int compareResult = 0;
// get the segment id
- // converr seg ID to double.
+ // convert segment ID to double.
double seg1 = Double.parseDouble(segment.getSegmentNo());
double seg2 = Double.parseDouble(((TableBlockInfo) other).segment.getSegmentNo());
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnPage.java
index e7ba267..fcb900c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnPage.java
@@ -84,10 +84,10 @@ public interface DimensionColumnPage {
/**
* @return whether column is dictionary column or not
*/
- boolean isNoDicitionaryColumn();
+ boolean isNoDictionaryColumn();
/**
- * @return whether columns where explictly sorted or not
+ * @return whether columns where explicitly sorted or not
*/
boolean isExplicitSorted();
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
index a5bbc5f..baee982 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
@@ -113,7 +113,7 @@ public abstract class AbstractDimensionColumnPage implements DimensionColumnPage
* @return column is dictionary column or not
*/
@Override
- public boolean isNoDicitionaryColumn() {
+ public boolean isNoDictionaryColumn() {
return false;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
index 2a71934..d823c47 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
@@ -79,8 +79,7 @@ public class VariableLengthDimensionColumnPage extends AbstractDimensionColumnPa
*/
@Override
public int fillRawData(int rowId, int offset, byte[] data) {
- // no required in this case because this column chunk is not the part if
- // mdkey
+ // no required in this case because this column chunk is not the part of MDKey
return 0;
}
@@ -101,7 +100,7 @@ public class VariableLengthDimensionColumnPage extends AbstractDimensionColumnPa
* @return whether column is dictionary column or not
*/
@Override
- public boolean isNoDicitionaryColumn() {
+ public boolean isNoDictionaryColumn() {
return true;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java
index 2c599d4..a0e917f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java
@@ -60,10 +60,10 @@ public class CarbonDataReaderFactory {
* @return dimension column data reader based on version number
*/
public DimensionColumnChunkReader getDimensionColumnChunkReader(ColumnarFormatVersion version,
- BlockletInfo blockletInfo, String filePath, boolean readPagebyPage) {
+ BlockletInfo blockletInfo, String filePath, boolean readPageByPage) {
switch (version) {
case V3:
- if (readPagebyPage) {
+ if (readPageByPage) {
return new DimensionChunkPageReaderV3(blockletInfo, filePath);
} else {
return new DimensionChunkReaderV3(blockletInfo, filePath);
@@ -82,10 +82,10 @@ public class CarbonDataReaderFactory {
* @return measure column data reader based on version number
*/
public MeasureColumnChunkReader getMeasureColumnChunkReader(ColumnarFormatVersion version,
- BlockletInfo blockletInfo, String filePath, boolean readPagebyPage) {
+ BlockletInfo blockletInfo, String filePath, boolean readPageByPage) {
switch (version) {
case V3:
- if (readPagebyPage) {
+ if (readPageByPage) {
return new MeasureChunkPageReaderV3(blockletInfo, filePath);
} else {
return new MeasureChunkReaderV3(blockletInfo, filePath);
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractDimensionChunkReader.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractDimensionChunkReader.java
index d14e69c..e6812c5 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractDimensionChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractDimensionChunkReader.java
@@ -91,7 +91,7 @@ public abstract class AbstractDimensionChunkReader implements DimensionColumnChu
// read the column chunk based on block index and add
DimensionRawColumnChunk[] dataChunks =
new DimensionRawColumnChunk[dimensionChunksOffset.size()];
- // if blocklet index is empty then return empry data chunk
+ // if blocklet index is empty then return empty data chunk
if (columnIndexRange.length == 0) {
return dataChunks;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/DimensionChunkPageReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/DimensionChunkPageReaderV3.java
index 1ce8465..deafe48 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/DimensionChunkPageReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/DimensionChunkPageReaderV3.java
@@ -155,7 +155,7 @@ public class DimensionChunkPageReaderV3 extends DimensionChunkReaderV3 {
CarbonMetadataUtil.getCompressorNameFromChunkMeta(pageMetadata.getChunk_meta()));
}
// calculating the start point of data
- // as buffer can contain multiple column data, start point will be datachunkoffset +
+ // as buffer can contain multiple column data, start point will be data chunk offset +
// data chunk length + page offset
long offset = dimensionRawColumnChunk.getOffSet() + dimensionChunksLength
.get(dimensionRawColumnChunk.getColumnIndex()) + dataChunk3.getPage_offset()
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/DimensionChunkReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/DimensionChunkReaderV3.java
index 1cfcbd1..d53c9d3 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/DimensionChunkReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/DimensionChunkReaderV3.java
@@ -98,7 +98,7 @@ public class DimensionChunkReaderV3 extends AbstractDimensionChunkReader {
// column other than last column we can subtract the offset of current column with
// next column and get the total length.
// but for last column we need to use lastDimensionOffset which is the end position
- // of the last dimension, we can subtract current dimension offset from lastDimesionOffset
+ // of the last dimension, we can subtract current dimension offset from lastDimensionOffset
if (dimensionChunksOffset.size() - 1 == columnIndex) {
length = (int) (lastDimensionOffsets - currentDimensionOffset);
} else {
@@ -225,7 +225,7 @@ public class DimensionChunkReaderV3 extends AbstractDimensionChunkReader {
pageMetadata.getChunk_meta());
this.compressor = CompressorFactory.getInstance().getCompressor(compressorName);
// calculating the start point of data
- // as buffer can contain multiple column data, start point will be datachunkoffset +
+ // as buffer can contain multiple column data, start point will be data chunk offset +
// data chunk length + page offset
int offset = (int) rawColumnPage.getOffSet() + dimensionChunksLength
.get(rawColumnPage.getColumnIndex()) + dataChunk3.getPage_offset().get(pageNumber);
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/MeasureChunkPageReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/MeasureChunkPageReaderV3.java
index 7776562..6ce717c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/MeasureChunkPageReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/MeasureChunkPageReaderV3.java
@@ -147,7 +147,7 @@ public class MeasureChunkPageReaderV3 extends MeasureChunkReaderV3 {
pageMetadata.getChunk_meta());
this.compressor = CompressorFactory.getInstance().getCompressor(compressorName);
// calculating the start point of data
- // as buffer can contain multiple column data, start point will be datachunkoffset +
+ // as buffer can contain multiple column data, start point will be data chunk offset +
// data chunk length + page offset
long offset = rawColumnPage.getOffSet() + measureColumnChunkLength
.get(rawColumnPage.getColumnIndex()) + dataChunk3.getPage_offset().get(pageNumber);
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/MeasureChunkReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/MeasureChunkReaderV3.java
index 3a8e5f0..7ad92e5 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/MeasureChunkReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/MeasureChunkReaderV3.java
@@ -215,7 +215,7 @@ public class MeasureChunkReaderV3 extends AbstractMeasureChunkReader {
pageMetadata.getChunk_meta());
this.compressor = CompressorFactory.getInstance().getCompressor(compressorName);
// calculating the start point of data
- // as buffer can contain multiple column data, start point will be datachunkoffset +
+ // as buffer can contain multiple column data, start point will be data chunk offset +
// data chunk length + page offset
int offset = (int) rawColumnChunk.getOffSet() +
measureColumnChunkLength.get(rawColumnChunk.getColumnIndex()) +
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/ColumnPageWrapper.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/ColumnPageWrapper.java
index 5f1cac9..e63757d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/ColumnPageWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/ColumnPageWrapper.java
@@ -237,7 +237,7 @@ public class ColumnPageWrapper implements DimensionColumnPage {
}
@Override
- public boolean isNoDicitionaryColumn() {
+ public boolean isNoDictionaryColumn() {
return true;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/DimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/DimensionDataChunkStore.java
index 8972ddb..6a0f998 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/DimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/DimensionDataChunkStore.java
@@ -22,12 +22,12 @@ import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
/**
* Interface responsibility is to store dimension data in memory.
- * storage can be on heap or offheap.
+ * storage can be on heap or off-heap.
*/
public interface DimensionDataChunkStore {
/**
- * Below method will be used to put the rows and its metadata in offheap
+ * Below method will be used to put the rows and its metadata in off-heap
*
* @param invertedIndex inverted index to be stored
* @param invertedIndexReverse inverted index reverse to be stored
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/LocalDictDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/LocalDictDimensionDataChunkStore.java
index c57cc8d..de2b720 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/LocalDictDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/LocalDictDimensionDataChunkStore.java
@@ -25,7 +25,7 @@ import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.CarbonDictionary;
import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
import org.apache.carbondata.core.scan.result.vector.impl.directread.ColumnarVectorWrapperDirectFactory;
-import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertableVector;
+import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertibleVector;
import org.apache.carbondata.core.util.CarbonUtil;
/**
@@ -48,7 +48,7 @@ public class LocalDictDimensionDataChunkStore implements DimensionDataChunkStore
}
/**
- * Below method will be used to put the rows and its metadata in offheap
+ * Below method will be used to put the rows and its metadata in off-heap
*
* @param invertedIndex inverted index to be stored
* @param invertedIndexReverse inverted index reverse to be stored
@@ -87,8 +87,8 @@ public class LocalDictDimensionDataChunkStore implements DimensionDataChunkStore
dictionaryVector.putInt(i, surrogate);
}
}
- if (dictionaryVector instanceof ConvertableVector) {
- ((ConvertableVector) dictionaryVector).convert();
+ if (dictionaryVector instanceof ConvertibleVector) {
+ ((ConvertibleVector) dictionaryVector).convert();
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbsractDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbstractDimensionDataChunkStore.java
similarity index 93%
rename from core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbsractDimensionDataChunkStore.java
rename to core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbstractDimensionDataChunkStore.java
index 0a53ec6..6725393 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbsractDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbstractDimensionDataChunkStore.java
@@ -23,7 +23,7 @@ import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
/**
* Responsibility is to store dimension data
*/
-public abstract class SafeAbsractDimensionDataChunkStore implements DimensionDataChunkStore {
+public abstract class SafeAbstractDimensionDataChunkStore implements DimensionDataChunkStore {
/**
* data chunk for dimension column
@@ -43,19 +43,19 @@ public abstract class SafeAbsractDimensionDataChunkStore implements DimensionDat
/**
* to check whether dimension column was explicitly sorted or not
*/
- protected boolean isExplictSorted;
+ protected boolean isExplicitSorted;
/**
* Constructor
*
* @param isInvertedIdex is inverted index present
*/
- public SafeAbsractDimensionDataChunkStore(boolean isInvertedIdex) {
- this.isExplictSorted = isInvertedIdex;
+ public SafeAbstractDimensionDataChunkStore(boolean isInvertedIdex) {
+ this.isExplicitSorted = isInvertedIdex;
}
/**
- * Below method will be used to put the rows and its metadata in offheap
+ * Below method will be used to put the rows and its metadata in off-heap
*
* @param invertedIndex inverted index to be stored
* @param invertedIndexReverse inverted index reverse to be stored
@@ -126,7 +126,7 @@ public abstract class SafeAbsractDimensionDataChunkStore implements DimensionDat
*/
@Override
public boolean isExplicitSorted() {
- return isExplictSorted;
+ return isExplicitSorted;
}
/**
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeFixedLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeFixedLengthDimensionDataChunkStore.java
index 80640ab..4327b7e 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeFixedLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeFixedLengthDimensionDataChunkStore.java
@@ -26,14 +26,14 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
import org.apache.carbondata.core.scan.result.vector.impl.directread.ColumnarVectorWrapperDirectFactory;
-import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertableVector;
+import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertibleVector;
import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.core.util.CarbonUtil;
/**
* Below class will be used to store fixed length dimension data
*/
-public class SafeFixedLengthDimensionDataChunkStore extends SafeAbsractDimensionDataChunkStore {
+public class SafeFixedLengthDimensionDataChunkStore extends SafeAbstractDimensionDataChunkStore {
/**
* Size of each value
@@ -58,8 +58,8 @@ public class SafeFixedLengthDimensionDataChunkStore extends SafeAbsractDimension
vector = ColumnarVectorWrapperDirectFactory
.getDirectVectorWrapperFactory(vector, invertedIndex, nullBits, deletedRows, false, false);
fillVector(data, vectorInfo, vector);
- if (vector instanceof ConvertableVector) {
- ((ConvertableVector) vector).convert();
+ if (vector instanceof ConvertibleVector) {
+ ((ConvertibleVector) vector).convert();
}
}
@@ -102,8 +102,8 @@ public class SafeFixedLengthDimensionDataChunkStore extends SafeAbsractDimension
*/
@Override
public byte[] getRow(int rowId) {
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
- if (isExplictSorted) {
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
+ if (isExplicitSorted) {
rowId = invertedIndexReverse[rowId];
}
// creating a row
@@ -123,8 +123,8 @@ public class SafeFixedLengthDimensionDataChunkStore extends SafeAbsractDimension
*/
@Override
public int getSurrogate(int index) {
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
- if (isExplictSorted) {
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
+ if (isExplicitSorted) {
index = invertedIndexReverse[index];
}
// below part is to convert the byte array to surrogate value
@@ -141,8 +141,8 @@ public class SafeFixedLengthDimensionDataChunkStore extends SafeAbsractDimension
*/
@Override
public void fillRow(int rowId, byte[] buffer, int offset) {
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
- if (isExplictSorted) {
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
+ if (isExplicitSorted) {
rowId = invertedIndexReverse[rowId];
}
//copy the row from memory block based on offset
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableIntLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableIntLengthDimensionDataChunkStore.java
index 8dc4c0b..c0ea251 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableIntLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableIntLengthDimensionDataChunkStore.java
@@ -23,7 +23,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
/**
* Below class is responsible to store variable long length(>32000) dimension data chunk in
- * memory. Memory occupied can be on heap or offheap using unsafe interface
+ * memory. Memory occupied can be on heap or off-heap using unsafe interface
*/
public class SafeVariableIntLengthDimensionDataChunkStore
extends SafeVariableLengthDimensionDataChunkStore {
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java
index 5aab8d9..972fa97 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableLengthDimensionDataChunkStore.java
@@ -26,16 +26,16 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
import org.apache.carbondata.core.scan.result.vector.impl.directread.ColumnarVectorWrapperDirectFactory;
-import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertableVector;
+import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertibleVector;
import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.core.util.DataTypeUtil;
/**
* Below class is responsible to store variable length dimension data chunk in
- * memory. Memory occupied can be on heap or offheap using unsafe interface
+ * memory. Memory occupied can be on heap or off-heap using unsafe interface
*/
public abstract class SafeVariableLengthDimensionDataChunkStore
- extends SafeAbsractDimensionDataChunkStore {
+ extends SafeAbstractDimensionDataChunkStore {
/**
* total number of rows
@@ -59,7 +59,7 @@ public abstract class SafeVariableLengthDimensionDataChunkStore
}
/**
- * Below method will be used to put the rows and its metadata in offheap
+ * Below method will be used to put the rows and its metadata in off-heap
*
* @param invertedIndex inverted index to be stored
* @param invertedIndexReverse inverted index reverse to be stored
@@ -112,8 +112,8 @@ public abstract class SafeVariableLengthDimensionDataChunkStore
.getDirectVectorWrapperFactory(vector, invertedIndex, new BitSet(), vectorInfo.deletedRows,
false, false);
vectorFiller.fillVector(data, vector);
- if (vector instanceof ConvertableVector) {
- ((ConvertableVector) vector).convert();
+ if (vector instanceof ConvertibleVector) {
+ ((ConvertibleVector) vector).convert();
}
}
@@ -123,8 +123,8 @@ public abstract class SafeVariableLengthDimensionDataChunkStore
@Override
public byte[] getRow(int rowId) {
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
- if (isExplictSorted) {
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
+ if (isExplicitSorted) {
rowId = invertedIndexReverse[rowId];
}
// now to get the row from memory block we need to do following thing
@@ -150,8 +150,8 @@ public abstract class SafeVariableLengthDimensionDataChunkStore
@Override
public void fillRow(int rowId, CarbonColumnVector vector, int vectorRow) {
vector.setDictionary(null);
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
- if (isExplictSorted) {
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
+ if (isExplicitSorted) {
rowId = invertedIndexReverse[rowId];
}
// now to get the row from memory block we need to do following thing
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableShortLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableShortLengthDimensionDataChunkStore.java
index daac725..8a66e07 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableShortLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeVariableShortLengthDimensionDataChunkStore.java
@@ -23,7 +23,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
/**
* Below class is responsible to store variable long length(>32000) dimension data chunk in
- * memory. Memory occupied can be on heap or offheap using unsafe interface
+ * memory. Memory occupied can be on heap or off-heap using unsafe interface
*/
public class SafeVariableShortLengthDimensionDataChunkStore
extends SafeVariableLengthDimensionDataChunkStore {
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeAbstractDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeAbstractDimensionDataChunkStore.java
index 23376d3..0ae3181 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeAbstractDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeAbstractDimensionDataChunkStore.java
@@ -28,7 +28,7 @@ import org.apache.carbondata.core.util.ThreadLocalTaskInfo;
/**
* Responsibility is to store dimension data in memory. storage can be on heap
- * or offheap.
+ * or off-heap.
*/
public abstract class UnsafeAbstractDimensionDataChunkStore implements DimensionDataChunkStore {
@@ -68,20 +68,20 @@ public abstract class UnsafeAbstractDimensionDataChunkStore implements Dimension
* Constructor
*
* @param totalSize total size of the data to be kept
- * @param isInvertedIdex is inverted index present
+ * @param isInvertedIndex is inverted index present
* @param numberOfRows total number of rows
*/
- public UnsafeAbstractDimensionDataChunkStore(long totalSize, boolean isInvertedIdex,
+ public UnsafeAbstractDimensionDataChunkStore(long totalSize, boolean isInvertedIndex,
int numberOfRows, int dataLength) {
// allocating the data page
this.dataPageMemoryBlock = UnsafeMemoryManager.allocateMemoryWithRetry(taskId, totalSize);
this.dataLength = dataLength;
- this.isExplicitSorted = isInvertedIdex;
+ this.isExplicitSorted = isInvertedIndex;
}
/**
- * Below method will be used to put the rows and its metadata in offheap
+ * Below method will be used to put the rows and its metadata in off-heap
*
* @param invertedIndex inverted index to be stored
* @param invertedIndexReverse inverted index reverse to be stored
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeFixedLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeFixedLengthDimensionDataChunkStore.java
index f2464eb..9029772 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeFixedLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeFixedLengthDimensionDataChunkStore.java
@@ -22,7 +22,7 @@ import org.apache.carbondata.core.memory.CarbonUnsafe;
/**
* Below class is responsible to store fixed length dimension data chunk in
- * memory Memory occupied can be on heap or offheap using unsafe interface
+ * memory Memory occupied can be on heap or off-heap using unsafe interface
*/
public class UnsafeFixedLengthDimensionDataChunkStore
extends UnsafeAbstractDimensionDataChunkStore {
@@ -36,12 +36,12 @@ public class UnsafeFixedLengthDimensionDataChunkStore
* Constructor
*
* @param columnValueSize value of each column
- * @param isInvertedIdex is inverted index present
+ * @param isInvertedIndex is inverted index present
* @param numberOfRows total number of rows
*/
public UnsafeFixedLengthDimensionDataChunkStore(long totalDataSize, int columnValueSize,
- boolean isInvertedIdex, int numberOfRows, int dataLength) {
- super(totalDataSize, isInvertedIdex, numberOfRows, dataLength);
+ boolean isInvertedIndex, int numberOfRows, int dataLength) {
+ super(totalDataSize, isInvertedIndex, numberOfRows, dataLength);
this.columnValueSize = columnValueSize;
}
@@ -52,7 +52,7 @@ public class UnsafeFixedLengthDimensionDataChunkStore
*/
@Override
public byte[] getRow(int rowId) {
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
if (isExplicitSorted) {
rowId = CarbonUnsafe.getUnsafe().getInt(dataPageMemoryBlock.getBaseObject(),
dataPageMemoryBlock.getBaseOffset() + this.invertedIndexReverseOffset + ((long)rowId
@@ -77,7 +77,7 @@ public class UnsafeFixedLengthDimensionDataChunkStore
*/
@Override
public int getSurrogate(int index) {
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
if (isExplicitSorted) {
index = CarbonUnsafe.getUnsafe().getInt(dataPageMemoryBlock.getBaseObject(),
dataPageMemoryBlock.getBaseOffset() + this.invertedIndexReverseOffset + ((long)index
@@ -104,7 +104,7 @@ public class UnsafeFixedLengthDimensionDataChunkStore
*/
@Override
public void fillRow(int rowId, byte[] buffer, int offset) {
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
if (isExplicitSorted) {
rowId = CarbonUnsafe.getUnsafe().getInt(dataPageMemoryBlock.getBaseObject(),
dataPageMemoryBlock.getBaseOffset() + this.invertedIndexReverseOffset + ((long)rowId
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableIntLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableIntLengthDimensionDataChunkStore.java
index 80a7482..6fed7c6 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableIntLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableIntLengthDimensionDataChunkStore.java
@@ -23,13 +23,13 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
/**
* Below class is responsible to store variable length dimension data chunk in
- * memory Memory occupied can be on heap or offheap using unsafe interface
+ * memory Memory occupied can be on heap or off-heap using unsafe interface
*/
public class UnsafeVariableIntLengthDimensionDataChunkStore
extends UnsafeVariableLengthDimensionDataChunkStore {
- public UnsafeVariableIntLengthDimensionDataChunkStore(long totalSize, boolean isInvertedIdex,
+ public UnsafeVariableIntLengthDimensionDataChunkStore(long totalSize, boolean isInvertedIndex,
int numberOfRows, int dataLength) {
- super(totalSize, isInvertedIdex, numberOfRows, dataLength);
+ super(totalSize, isInvertedIndex, numberOfRows, dataLength);
}
@Override
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimensionDataChunkStore.java
index bd1e7c7..f44ea10 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableLengthDimensionDataChunkStore.java
@@ -26,7 +26,7 @@ import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
/**
* Below class is responsible to store variable length dimension data chunk in
- * memory Memory occupied can be on heap or offheap using unsafe interface
+ * memory Memory occupied can be on heap or off-heap using unsafe interface
*/
public abstract class UnsafeVariableLengthDimensionDataChunkStore
extends UnsafeAbstractDimensionDataChunkStore {
@@ -49,16 +49,16 @@ public abstract class UnsafeVariableLengthDimensionDataChunkStore
*/
private byte[] value;
- public UnsafeVariableLengthDimensionDataChunkStore(long totalSize, boolean isInvertedIdex,
+ public UnsafeVariableLengthDimensionDataChunkStore(long totalSize, boolean isInvertedIndex,
int numberOfRows, int dataLength) {
- super(totalSize, isInvertedIdex, numberOfRows, dataLength);
+ super(totalSize, isInvertedIndex, numberOfRows, dataLength);
this.numberOfRows = numberOfRows;
// initials size assigning to some random value
this.value = new byte[20];
}
/**
- * Below method will be used to put the rows and its metadata in offheap
+ * Below method will be used to put the rows and their metadata in off-heap
*
* @param invertedIndex inverted index to be stored
* @param invertedIndexReverse inverted index reverse to be stored
@@ -147,7 +147,7 @@ public abstract class UnsafeVariableLengthDimensionDataChunkStore
* @return actual row id
*/
private int getRowId(int rowId) {
- // if column was explicitly sorted we need to get the rowid based inverted index reverse
+ // if column was explicitly sorted we need to get the row id based inverted index reverse
if (isExplicitSorted) {
rowId = CarbonUnsafe.getUnsafe().getInt(dataPageMemoryBlock.getBaseObject(),
dataPageMemoryBlock.getBaseOffset() + this.invertedIndexReverseOffset + ((long)rowId
@@ -181,10 +181,10 @@ public abstract class UnsafeVariableLengthDimensionDataChunkStore
int length = 0;
// calculating the length of data
if (rowId < numberOfRows - 1) {
- int OffsetOfNextdata = CarbonUnsafe.getUnsafe().getInt(dataPageMemoryBlock.getBaseObject(),
+ int OffsetOfNextData = CarbonUnsafe.getUnsafe().getInt(dataPageMemoryBlock.getBaseObject(),
dataPageMemoryBlock.getBaseOffset() + this.dataPointersOffsets + ((rowId + 1)
* CarbonCommonConstants.INT_SIZE_IN_BYTE));
- length = OffsetOfNextdata - (currentDataOffset + getLengthSize());
+ length = OffsetOfNextData - (currentDataOffset + getLengthSize());
} else {
// for last record we need to subtract with data length
length = this.dataLength - currentDataOffset;
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableShortLengthDimensionDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableShortLengthDimensionDataChunkStore.java
index 502fc48..5f67b61 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableShortLengthDimensionDataChunkStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeVariableShortLengthDimensionDataChunkStore.java
@@ -23,13 +23,13 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
/**
* Below class is responsible to store variable length dimension data chunk in
- * memory Memory occupied can be on heap or offheap using unsafe interface
+ * memory Memory occupied can be on heap or off-heap using unsafe interface
*/
public class UnsafeVariableShortLengthDimensionDataChunkStore
extends UnsafeVariableLengthDimensionDataChunkStore {
- public UnsafeVariableShortLengthDimensionDataChunkStore(long totalSize, boolean isInvertedIdex,
+ public UnsafeVariableShortLengthDimensionDataChunkStore(long totalSize, boolean isInvertedIndex,
int numberOfRows, int dataLength) {
- super(totalSize, isInvertedIdex, numberOfRows, dataLength);
+ super(totalSize, isInvertedIndex, numberOfRows, dataLength);
}
@Override
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/ByteArrayBlockIndexerStorage.java b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/ByteArrayBlockIndexerStorage.java
index f5117cc..b65ac52 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/ByteArrayBlockIndexerStorage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/ByteArrayBlockIndexerStorage.java
@@ -46,17 +46,17 @@ public class ByteArrayBlockIndexerStorage extends BlockIndexerStorage<byte[][]>
*/
private ByteArrayColumnWithRowId[] createColumnWithRowId(byte[][] dataPage,
boolean isNoDictionary) {
- ByteArrayColumnWithRowId[] columnWithIndexs = new ByteArrayColumnWithRowId[dataPage.length];
+ ByteArrayColumnWithRowId[] columnWithIndexes = new ByteArrayColumnWithRowId[dataPage.length];
if (isNoDictionary) {
- for (short i = 0; i < columnWithIndexs.length; i++) {
- columnWithIndexs[i] = new ByteArrayColumnWithRowId(dataPage[i], i);
+ for (short i = 0; i < columnWithIndexes.length; i++) {
+ columnWithIndexes[i] = new ByteArrayColumnWithRowId(dataPage[i], i);
}
} else {
- for (short i = 0; i < columnWithIndexs.length; i++) {
- columnWithIndexs[i] = new ByteArrayColumnWithRowId(dataPage[i], i);
+ for (short i = 0; i < columnWithIndexes.length; i++) {
+ columnWithIndexes[i] = new ByteArrayColumnWithRowId(dataPage[i], i);
}
}
- return columnWithIndexs;
+ return columnWithIndexes;
}
private short[] extractDataAndReturnRowId(ByteArrayColumnWithRowId[] dataWithRowId,
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/ObjectArrayBlockIndexerStorage.java b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/ObjectArrayBlockIndexerStorage.java
index 27c26a9..23f1c67 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/ObjectArrayBlockIndexerStorage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/ObjectArrayBlockIndexerStorage.java
@@ -45,12 +45,12 @@ public class ObjectArrayBlockIndexerStorage extends BlockIndexerStorage<Object[]
* @return
*/
private ObjectColumnWithRowId[] createColumnWithRowId(Object[] dataPage) {
- ObjectColumnWithRowId[] columnWithIndexs =
+ ObjectColumnWithRowId[] columnWithIndexes =
new ObjectColumnWithRowId[dataPage.length];
- for (short i = 0; i < columnWithIndexs.length; i++) {
- columnWithIndexs[i] = new ObjectColumnWithRowId(dataPage[i], i, dataType);
+ for (short i = 0; i < columnWithIndexes.length; i++) {
+ columnWithIndexes[i] = new ObjectColumnWithRowId(dataPage[i], i, dataType);
}
- return columnWithIndexs;
+ return columnWithIndexes;
}
private short[] extractDataAndReturnRowId(ObjectColumnWithRowId[] dataWithRowId,
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/CompressorFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/CompressorFactory.java
index e695bda..5b37cae 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/CompressorFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/CompressorFactory.java
@@ -103,8 +103,8 @@ public class CompressorFactory {
+ " found '%s'", compressorClassName, ((Compressor) instance).getName()));
}
allSupportedCompressors.put(compressorClassName, (Compressor) instance);
- LOGGER.info(
- String.format("sucessfully register compressor %s to carbondata", compressorClassName));
+ LOGGER.info(String.format(
+ "successfully register compressor %s to carbondata", compressorClassName));
return (Compressor) instance;
} else {
throw new RuntimeException(
@@ -142,7 +142,7 @@ public class CompressorFactory {
}
// if we specify the compressor name in table property, carbondata now will convert the
- // property value to lowercase, so here we will ingore the case and find the real name.
+ // property value to lowercase, so here we will ignore the case and find the real name.
private String getInternalCompressorName(String name) {
for (String key : allSupportedCompressors.keySet()) {
if (key.equalsIgnoreCase(name)) {
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/GzipCompressor.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/GzipCompressor.java
index 390029a..102786b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/GzipCompressor.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/GzipCompressor.java
@@ -143,7 +143,7 @@ public class GzipCompressor extends AbstractCompressor {
@Override
public long rawUncompress(byte[] input, byte[] output) {
//gzip api doesnt have rawUncompress yet.
- throw new RuntimeException("Not implemented rawUcompress for gzip yet");
+ throw new RuntimeException("Not implemented rawUncompress for gzip yet");
}
@Override
@@ -165,6 +165,6 @@ public class GzipCompressor extends AbstractCompressor {
@Override
public int rawUncompress(byte[] data, int offset, int length, byte[] output) {
//gzip api doesnt have rawUncompress yet.
- throw new RuntimeException("Not implemented rawUcompress for gzip yet");
+ throw new RuntimeException("Not implemented rawUncompress for gzip yet");
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
index 7e12dc9..f5cb539 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
@@ -160,9 +160,9 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
}
@Override
- public boolean renameTo(String changetoName) {
+ public boolean renameTo(String changeToName) {
try {
- return fileSystem.rename(path, new Path(changetoName));
+ return fileSystem.rename(path, new Path(changeToName));
} catch (IOException e) {
throw new CarbonFileException("Failed to rename file: ", e);
}
@@ -293,7 +293,7 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
}
/**
- * return the datainputStream which is seek to the offset of file
+ * return the DataInputStream which is seek to the offset of file
*
* @return DataInputStream
* @throws IOException
@@ -346,7 +346,7 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
} else if ("LZ4".equalsIgnoreCase(compressorName)) {
return Lz4Codec.class.getName();
} else {
- throw new IOException("Unsuppotted compressor: " + compressorName);
+ throw new IOException("Unsupported compressor: " + compressorName);
}
}
@@ -408,7 +408,7 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
permission =
FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(fileSystem.getConf()));
}
- // Pass the permissions duringg file creation itself
+ // Pass the permissions during file creation itself
fileSystem
.create(path, permission, false, fileSystem.getConf().getInt("io.file.buffer.size", 4096),
fileSystem.getDefaultReplication(path), fileSystem.getDefaultBlockSize(path), null)
@@ -437,12 +437,12 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
if (fileSystem.exists(path)) {
return false;
} else {
- // Pass the permissions duringg file creation itself
+ // Pass the permissions during file creation itself
fileSystem.create(path, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL), false,
fileSystem.getConf().getInt("io.file.buffer.size", 4096),
fileSystem.getDefaultReplication(path), fileSystem.getDefaultBlockSize(path), null)
.close();
- // haddop masks the permission accoding to configured permission, so need to set permission
+ // hadoop masks the permission according to configured permission, so need to set permission
// forcefully
fileSystem.setPermission(path, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
return true;
@@ -455,14 +455,14 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
try {
listStatus = fileSystem.listStatus(path);
} catch (IOException e) {
- LOGGER.warn("Exception occured: " + e.getMessage(), e);
+ LOGGER.warn("Exception occurred: " + e.getMessage(), e);
return new CarbonFile[0];
}
return getFiles(listStatus);
}
/**
- * Get the CarbonFiles from filestatus array
+ * Get the CarbonFiles from FileStatus array
*/
protected abstract CarbonFile[] getFiles(FileStatus[] listStatus);
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java
index 1a748af..ae2cd7c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFile.java
@@ -71,10 +71,10 @@ public class AlluxioCarbonFile extends HDFSCarbonFile {
}
@Override
- public boolean renameForce(String changetoName) {
+ public boolean renameForce(String changeToName) {
try {
if (fileSystem instanceof DistributedFileSystem) {
- ((DistributedFileSystem) fileSystem).rename(path, new Path(changetoName),
+ ((DistributedFileSystem) fileSystem).rename(path, new Path(changeToName),
org.apache.hadoop.fs.Options.Rename.OVERWRITE);
return true;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
index 7d36f0e..53d2e99 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
@@ -72,22 +72,22 @@ public class HDFSCarbonFile extends AbstractDFSCarbonFile {
}
@Override
- public boolean renameForce(String changetoName) {
+ public boolean renameForce(String changeToName) {
try {
if (fileSystem instanceof DistributedFileSystem) {
- ((DistributedFileSystem) fileSystem).rename(path, new Path(changetoName),
+ ((DistributedFileSystem) fileSystem).rename(path, new Path(changeToName),
org.apache.hadoop.fs.Options.Rename.OVERWRITE);
return true;
} else if ((fileSystem instanceof FilterFileSystem) && (((FilterFileSystem) fileSystem)
.getRawFileSystem() instanceof DistributedFileSystem)) {
((DistributedFileSystem) ((FilterFileSystem) fileSystem).getRawFileSystem())
- .rename(path, new Path(changetoName), org.apache.hadoop.fs.Options.Rename.OVERWRITE);
+ .rename(path, new Path(changeToName), org.apache.hadoop.fs.Options.Rename.OVERWRITE);
return true;
} else {
- return fileSystem.rename(path, new Path(changetoName));
+ return fileSystem.rename(path, new Path(changeToName));
}
} catch (IOException e) {
- LOGGER.error("Exception occured: " + e.getMessage(), e);
+ LOGGER.error("Exception occurred: " + e.getMessage(), e);
return false;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
index 1bb1c99..1cc7242 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
@@ -117,7 +117,7 @@ public class LocalCarbonFile implements CarbonFile {
try {
return file.getCanonicalPath();
} catch (IOException e) {
- LOGGER.error("Exception occured" + e.getMessage(), e);
+ LOGGER.error("Exception occurred" + e.getMessage(), e);
}
return null;
}
@@ -137,9 +137,9 @@ public class LocalCarbonFile implements CarbonFile {
return file.length();
}
- public boolean renameTo(String changetoName) {
- changetoName = FileFactory.getUpdatedFilePath(changetoName);
- return file.renameTo(new File(changetoName));
+ public boolean renameTo(String changeToName) {
+ changeToName = FileFactory.getUpdatedFilePath(changeToName);
+ return file.renameTo(new File(changeToName));
}
public boolean delete() {
@@ -256,7 +256,7 @@ public class LocalCarbonFile implements CarbonFile {
tempFile.renameForce(fileName);
fileTruncatedSuccessfully = true;
} catch (IOException e) {
- LOGGER.error("Exception occured while truncating the file " + e.getMessage(), e);
+ LOGGER.error("Exception occurred while truncating the file " + e.getMessage(), e);
} finally {
CarbonUtil.closeStreams(source, destination);
}
@@ -331,7 +331,7 @@ public class LocalCarbonFile implements CarbonFile {
}
/**
- * return the datainputStream which is seek to the offset of file
+ * return the DataInputStream which is seek to the offset of file
*
* @param bufferSize
* @param offset
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java
index c0e9aba..5136b8e 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/S3CarbonFile.java
@@ -59,15 +59,15 @@ public class S3CarbonFile extends HDFSCarbonFile {
Refer CARBONDATA-2670 for tracking this.
*/
@Override
- public boolean renameForce(String changetoName) {
+ public boolean renameForce(String changeToName) {
try {
// check if any file with the new name exists and delete it.
- CarbonFile newCarbonFile = FileFactory.getCarbonFile(changetoName);
+ CarbonFile newCarbonFile = FileFactory.getCarbonFile(changeToName);
newCarbonFile.delete();
// rename the old file to the new name.
- return fileSystem.rename(path, new Path(changetoName));
+ return fileSystem.rename(path, new Path(changeToName));
} catch (IOException e) {
- LOGGER.error("Exception occured: " + e.getMessage(), e);
+ LOGGER.error("Exception occurred: " + e.getMessage(), e);
return false;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java
index 4f90cd1..4b9294c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/ViewFSCarbonFile.java
@@ -90,7 +90,7 @@ public class ViewFSCarbonFile extends AbstractDFSCarbonFile {
return false;
}
} catch (IOException e) {
- LOGGER.error("Exception occured" + e.getMessage(), e);
+ LOGGER.error("Exception occurred" + e.getMessage(), e);
return false;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
index 6fe3242..1233b5f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
@@ -175,7 +175,7 @@ public final class FileFactory {
}
/**
- * Need carbonfile object path because depends on file format implementation
+ * Need carbon file object path because depends on file format implementation
* path will be formatted.
*/
public static String getFormattedPath(String path) {
@@ -224,7 +224,7 @@ public final class FileFactory {
}
/**
- * return the datainputStream which is seek to the offset of file
+ * return the DataInputStream which is seek to the offset of file
*
* @param path
* @param bufferSize
@@ -354,7 +354,7 @@ public final class FileFactory {
}
/**
- * for getting the dataoutput stream using the hdfs filesystem append API.
+ * for getting the DataOutputStream using the hdfs filesystem append API.
*
* @param path
* @return
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java
index 4b2c368..9d7e2f1 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java
@@ -82,7 +82,7 @@ public class FileReaderImpl implements FileReader {
}
/**
- * This method will be used to read int from file from postion(offset), here
+ * This method will be used to read int from file from position(offset), here
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
@@ -97,7 +97,7 @@ public class FileReaderImpl implements FileReader {
}
/**
- * This method will be used to read int from file from postion(offset), here
+ * This method will be used to read int from file from position(offset), here
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
@@ -111,7 +111,7 @@ public class FileReaderImpl implements FileReader {
}
/**
- * This method will be used to read int from file from postion(offset), here
+ * This method will be used to read int from file from position(offset), here
* length will be always 4 because int byte size if 4
*
* @param filePath fully qualified file path
@@ -189,7 +189,7 @@ public class FileReaderImpl implements FileReader {
}
/**
- * This method will be used to read long from file from postion(offset), here
+ * This method will be used to read long from file from position(offset), here
* length will be always 8 because int byte size is 8
*
* @param filePath fully qualified file path
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
index f9de81a..8127f26 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
@@ -39,7 +39,7 @@ public interface FileTypeInterface {
* Check if the FileSystem mapped with the given path is supported or not.
*
* @param path path of the file
- * @return true if supported, fasle if not supported
+ * @return true if supported, false if not supported
*/
public boolean isPathSupported(String path);
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java
index 0722ddf..45a5658 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java
@@ -31,7 +31,7 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.util.DataTypeUtil;
/**
- * holds the complex columndata and its children data
+ * holds the complex column data and its children data
*/
public class ComplexColumnPage {
@@ -63,7 +63,7 @@ public class ComplexColumnPage {
}
/**
- * below method will be used to initlize the column page of complex type
+ * below method will be used to initialize the column page of complex type
* @param columnToDictMap dictionary map
* @param pageSize number of records
*/
@@ -180,7 +180,7 @@ public class ComplexColumnPage {
* return the column page
* @param complexColumnIndex
* complexColumnIndex of column
- * @return colum page
+ * @return column page
*/
public ColumnPage getColumnPage(int complexColumnIndex) {
assert (complexColumnIndex <= this.complexColumnIndex);
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/DecoderBasedFallbackEncoder.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/DecoderBasedFallbackEncoder.java
index 017e605..30d07f7 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/DecoderBasedFallbackEncoder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/DecoderBasedFallbackEncoder.java
@@ -103,7 +103,7 @@ public class DecoderBasedFallbackEncoder implements Callable<FallbackEncodedColu
encodedColumnPage.getActualPage().getPageSize());
// uncompressed data from encoded column page is dictionary data, get the dictionary data using
- // keygenerator
+ // KeyGenerator
KeyGenerator keyGenerator = KeyGeneratorFactory
.getKeyGenerator(new int[] { CarbonCommonConstants.LOCAL_DICTIONARY_MAX + 1 });
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/EncodedTablePage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/EncodedTablePage.java
index 8a3482a..d379a52 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/EncodedTablePage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/EncodedTablePage.java
@@ -33,7 +33,7 @@ public class EncodedTablePage {
// number of row in this page
private int pageSize;
- // size in bytes of all encoded columns (including data and metadate)
+ // size in bytes of all encoded columns (including data and metadata)
private int encodedSize;
public static EncodedTablePage newInstance(int pageSize,
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java
index 797dd11..05fed58 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/LocalDictColumnPage.java
@@ -76,7 +76,7 @@ public class LocalDictColumnPage extends ColumnPage {
/**
* Create a new column page with input data type and page size.
*/
- protected LocalDictColumnPage(ColumnPage actualDataColumnPage, ColumnPage encodedColumnpage,
+ protected LocalDictColumnPage(ColumnPage actualDataColumnPage, ColumnPage encodedColumnPage,
LocalDictionaryGenerator localDictionaryGenerator, boolean isComplexTypePrimitive,
boolean isDecoderBasedFallBackEnabled) {
super(actualDataColumnPage.getColumnPageEncoderMeta(), actualDataColumnPage.getPageSize());
@@ -86,13 +86,13 @@ public class LocalDictColumnPage extends ColumnPage {
pageLevelDictionary = new PageLevelDictionary(localDictionaryGenerator,
actualDataColumnPage.getColumnSpec().getFieldName(), actualDataColumnPage.getDataType(),
isComplexTypePrimitive, actualDataColumnPage.getColumnCompressorName());
- this.encodedDataColumnPage = encodedColumnpage;
+ this.encodedDataColumnPage = encodedColumnPage;
this.keyGenerator = KeyGeneratorFactory
.getKeyGenerator(new int[] { CarbonCommonConstants.LOCAL_DICTIONARY_MAX + 1 });
this.dummyKey = new int[1];
} else {
// else free the encoded column page memory as its of no use
- encodedColumnpage.freeMemory();
+ encodedColumnPage.freeMemory();
}
this.isDecoderBasedFallBackEnabled = isDecoderBasedFallBackEnabled;
this.actualDataColumnPage = actualDataColumnPage;
@@ -118,7 +118,7 @@ public class LocalDictColumnPage extends ColumnPage {
/**
* Below method will be used to check whether page is local dictionary
- * generated or not. This will be used for while enoding the the page
+ * generated or not. This will be used while encoding the page
*
* @return
*/
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/SafeFixLengthColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/SafeFixLengthColumnPage.java
index 23f6288..d6f5d0d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/SafeFixLengthColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/SafeFixLengthColumnPage.java
@@ -40,7 +40,7 @@ public class SafeFixLengthColumnPage extends ColumnPage {
private float[] floatData;
private double[] doubleData;
private byte[] shortIntData;
- private byte[][] fixedLengthdata;
+ private byte[][] fixedLengthData;
private int totalLength;
// total number of entries in array
@@ -122,7 +122,7 @@ public class SafeFixLengthColumnPage extends ColumnPage {
@Override
public void putBytes(int rowId, byte[] bytes) {
ensureArraySize(rowId, DataTypes.BYTE_ARRAY);
- this.fixedLengthdata[rowId] = bytes;
+ this.fixedLengthData[rowId] = bytes;
arrayElementCount++;
totalLength += bytes.length;
}
@@ -218,7 +218,7 @@ public class SafeFixLengthColumnPage extends ColumnPage {
@Override
public byte[] getBytes(int rowId) {
- return this.fixedLengthdata[rowId];
+ return this.fixedLengthData[rowId];
}
/**
@@ -284,7 +284,7 @@ public class SafeFixLengthColumnPage extends ColumnPage {
public byte[][] getByteArrayPage() {
byte[][] data = new byte[arrayElementCount][];
for (int i = 0; i < arrayElementCount; i++) {
- data[i] = fixedLengthdata[i];
+ data[i] = fixedLengthData[i];
}
return data;
}
@@ -300,7 +300,7 @@ public class SafeFixLengthColumnPage extends ColumnPage {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
DataOutputStream out = new DataOutputStream(stream);
for (int i = 0; i < arrayElementCount; i++) {
- out.write(fixedLengthdata[i]);
+ out.write(fixedLengthData[i]);
}
return stream.toByteArray();
}
@@ -384,7 +384,7 @@ public class SafeFixLengthColumnPage extends ColumnPage {
floatData = null;
doubleData = null;
shortIntData = null;
- fixedLengthdata = null;
+ fixedLengthData = null;
}
/**
@@ -467,16 +467,16 @@ public class SafeFixLengthColumnPage extends ColumnPage {
doubleData = newArray;
}
} else if (dataType == DataTypes.BYTE_ARRAY) {
- if (fixedLengthdata == null) {
- fixedLengthdata = new byte[pageSize][];
+ if (fixedLengthData == null) {
+ fixedLengthData = new byte[pageSize][];
}
- if (requestSize >= fixedLengthdata.length) {
+ if (requestSize >= fixedLengthData.length) {
byte[][] newArray = new byte[arrayElementCount * 2][];
int index = 0;
- for (byte[] data : fixedLengthdata) {
+ for (byte[] data : fixedLengthData) {
newArray[index++] = data;
}
- fixedLengthdata = newArray;
+ fixedLengthData = newArray;
}
} else {
throw new UnsupportedOperationException(
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
index 182d0d4..7606a9c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoder.java
@@ -133,14 +133,14 @@ public abstract class ColumnPageEncoder {
private List<ByteBuffer> buildEncoderMeta(ColumnPage inputPage) throws IOException {
ColumnPageEncoderMeta meta = getEncoderMeta(inputPage);
- List<ByteBuffer> metaDatas = new ArrayList<>();
+ List<ByteBuffer> metaData = new ArrayList<>();
if (meta != null) {
ByteArrayOutputStream stream = new ByteArrayOutputStream();
DataOutputStream out = new DataOutputStream(stream);
meta.write(out);
- metaDatas.add(ByteBuffer.wrap(stream.toByteArray()));
+ metaData.add(ByteBuffer.wrap(stream.toByteArray()));
}
- return metaDatas;
+ return metaData;
}
private void fillMinMaxIndex(ColumnPage inputPage, DataChunk2 dataChunk) {
@@ -176,8 +176,8 @@ public abstract class ColumnPageEncoder {
* `buildPageMetadata` will call this for backward compatibility
*/
protected void fillLegacyFields(DataChunk2 dataChunk) {
- // Subclass should override this to update datachunk2 if any backward compatibility if required,
- // For example, when using IndexStorageCodec, rle_page_length and rowid_page_length need to be
+ // Subclass should override this to update DataChunk2 if any backward compatibility is required,
+ // For example, when using IndexStorageCodec, rle_page_length and rowid_page_length need to be
// updated
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoderMeta.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoderMeta.java
index f04d38a..a1e3765 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoderMeta.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageEncoderMeta.java
@@ -49,7 +49,7 @@ public class ColumnPageEncoderMeta extends ValueEncoderMeta implements Writable
// Make it protected for RLEEncoderMeta
protected String compressorName;
- // Whether the flow shoild go to fill complete vector while decoding the page.
+ // Whether the flow should go to fill complete vector while decoding the page.
private transient boolean fillCompleteVector;
public ColumnPageEncoderMeta() {
@@ -58,7 +58,7 @@ public class ColumnPageEncoderMeta extends ValueEncoderMeta implements Writable
public ColumnPageEncoderMeta(TableSpec.ColumnSpec columnSpec, DataType storeDataType,
String compressorName) {
if (columnSpec == null) {
- throw new IllegalArgumentException("columm spec must not be null");
+ throw new IllegalArgumentException("column spec must not be null");
}
if (storeDataType == null) {
throw new IllegalArgumentException("store data type must not be null");
@@ -199,7 +199,7 @@ public class ColumnPageEncoderMeta extends ValueEncoderMeta implements Writable
byte[] min = new byte[in.readShort()];
in.readFully(min);
this.setMinValue(DataTypeUtil.byteToBigDecimal(min));
- // unique value is obsoleted, maintain for compatiability
+ // unique value is obsoleted, maintain for compatibility
short uniqueLength = in.readShort();
in.readFully(new byte[uniqueLength]);
// scale field is obsoleted. It is stored in the schema data type in columnSpec
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaIntegralCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaIntegralCodec.java
index 573c225..67e384a 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaIntegralCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaIntegralCodec.java
@@ -42,7 +42,7 @@ import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
import org.apache.carbondata.core.scan.result.vector.impl.directread.ColumnarVectorWrapperDirectFactory;
-import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertableVector;
+import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertibleVector;
import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill;
import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.format.DataChunk2;
@@ -325,8 +325,8 @@ public class AdaptiveDeltaIntegralCodec extends AdaptiveCodec {
vector.putNull(i);
}
}
- if (vector instanceof ConvertableVector) {
- ((ConvertableVector) vector).convert();
+ if (vector instanceof ConvertibleVector) {
+ ((ConvertibleVector) vector).convert();
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveIntegralCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveIntegralCodec.java
index 3554cd3..40b3331 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveIntegralCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveIntegralCodec.java
@@ -41,7 +41,7 @@ import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
import org.apache.carbondata.core.scan.result.vector.impl.directread.ColumnarVectorWrapperDirectFactory;
-import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertableVector;
+import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertibleVector;
import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill;
import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.format.DataChunk2;
@@ -299,8 +299,8 @@ public class AdaptiveIntegralCodec extends AdaptiveCodec {
vector.putNull(i);
}
}
- if (vector instanceof ConvertableVector) {
- ((ConvertableVector) vector).convert();
+ if (vector instanceof ConvertibleVector) {
+ ((ConvertibleVector) vector).convert();
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java
index 5fff9c2..9d47886 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/compress/DirectCompressCodec.java
@@ -42,7 +42,7 @@ import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
import org.apache.carbondata.core.scan.result.vector.impl.directread.ColumnarVectorWrapperDirectFactory;
-import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertableVector;
+import org.apache.carbondata.core.scan.result.vector.impl.directread.ConvertibleVector;
import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill;
import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.format.Encoding;
@@ -253,8 +253,8 @@ public class DirectCompressCodec implements ColumnPageCodec {
vector.putNull(i);
}
}
- if (vector instanceof ConvertableVector) {
- ((ConvertableVector) vector).convert();
+ if (vector instanceof ConvertibleVector) {
+ ((ConvertibleVector) vector).convert();
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/enums/EscapeSequences.java b/core/src/main/java/org/apache/carbondata/core/enums/EscapeSequences.java
index 4bbdddf..fe6e01a 100644
--- a/core/src/main/java/org/apache/carbondata/core/enums/EscapeSequences.java
+++ b/core/src/main/java/org/apache/carbondata/core/enums/EscapeSequences.java
@@ -27,7 +27,7 @@ public enum EscapeSequences {
private String name;
/**
- * unicode of the escapechar
+ * unicode of the escape char
*/
private char escapeChar;
diff --git a/core/src/main/java/org/apache/carbondata/core/index/IndexChooser.java b/core/src/main/java/org/apache/carbondata/core/index/IndexChooser.java
index 4c8bcc5..06cd079 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/IndexChooser.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/IndexChooser.java
@@ -43,7 +43,7 @@ import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.TrueConditio
/**
* This chooser does 2 jobs.
- * 1. Based on filter expression it converts the available Indexs to Index expression.
+ * 1. Based on filter expression it converts the available Indexes to Index expression.
* For example, there are 2 Indexes available on table1
* Index1 : column1
* Index2 : column2
diff --git a/core/src/main/java/org/apache/carbondata/core/index/IndexFilter.java b/core/src/main/java/org/apache/carbondata/core/index/IndexFilter.java
index 758d30d..cbb41c1 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/IndexFilter.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/IndexFilter.java
@@ -36,7 +36,7 @@ import org.apache.carbondata.core.scan.expression.conditional.InExpression;
import org.apache.carbondata.core.scan.expression.logical.AndExpression;
import org.apache.carbondata.core.scan.filter.FilterExpressionProcessor;
import org.apache.carbondata.core.scan.filter.intf.FilterOptimizer;
-import org.apache.carbondata.core.scan.filter.optimizer.RangeFilterOptmizer;
+import org.apache.carbondata.core.scan.filter.optimizer.RangeFilterOptimizer;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.scan.model.QueryModel;
import org.apache.carbondata.core.util.ObjectSerializationUtil;
@@ -231,7 +231,7 @@ public class IndexFilter implements Serializable {
processFilterExpressionWithoutRange(isFilterDimensions, isFilterMeasures);
if (null != expression) {
// Optimize Filter Expression and fit RANGE filters is conditions apply.
- FilterOptimizer rangeFilterOptimizer = new RangeFilterOptmizer(expression);
+ FilterOptimizer rangeFilterOptimizer = new RangeFilterOptimizer(expression);
rangeFilterOptimizer.optimizeFilter();
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/IndexInputFormat.java b/core/src/main/java/org/apache/carbondata/core/index/IndexInputFormat.java
index cd82345..a6e02527 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/IndexInputFormat.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/IndexInputFormat.java
@@ -93,7 +93,7 @@ public class IndexInputFormat extends FileInputFormat<Void, ExtendedBlocklet>
private boolean isCountStarJob = false;
- // Whether AsyncCall to the Index Server(true in the case of prepriming)
+ // Whether AsyncCall to the Index Server(true in the case of pre-priming)
private boolean isAsyncCall;
IndexInputFormat() {
@@ -128,11 +128,11 @@ public class IndexInputFormat extends FileInputFormat<Void, ExtendedBlocklet>
@Override
public List<InputSplit> getSplits(JobContext job) throws IOException {
- List<IndexInputSplitWrapper> distributables;
- distributables =
+ List<IndexInputSplitWrapper> distributableList;
+ distributableList =
IndexChooser.getDefaultIndex(table, filterResolverIntf).toDistributable(validSegments);
- List<InputSplit> inputSplits = new ArrayList<>(distributables.size());
- inputSplits.addAll(distributables);
+ List<InputSplit> inputSplits = new ArrayList<>(distributableList.size());
+ inputSplits.addAll(distributableList);
return inputSplits;
}
@@ -356,19 +356,19 @@ public class IndexInputFormat extends FileInputFormat<Void, ExtendedBlocklet>
* then need to cut as transferring big query to IndexServer will be costly.
*/
public void setTaskGroupDesc(String taskGroupDesc) {
- int maxJobLenth;
+ int maxJobLength;
try {
- String maxJobLenthString = CarbonProperties.getInstance()
+ String maxJobLengthString = CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.CARBON_INDEX_SERVER_JOBNAME_LENGTH ,
CarbonCommonConstants.CARBON_INDEX_SERVER_JOBNAME_LENGTH_DEFAULT);
- maxJobLenth = Integer.parseInt(maxJobLenthString);
+ maxJobLength = Integer.parseInt(maxJobLengthString);
} catch (Exception e) {
- String maxJobLenthString = CarbonProperties.getInstance()
+ String maxJobLengthString = CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.CARBON_INDEX_SERVER_JOBNAME_LENGTH_DEFAULT);
- maxJobLenth = Integer.parseInt(maxJobLenthString);
+ maxJobLength = Integer.parseInt(maxJobLengthString);
}
- if (taskGroupDesc.length() > maxJobLenth) {
- this.taskGroupDesc = taskGroupDesc.substring(0, maxJobLenth);
+ if (taskGroupDesc.length() > maxJobLength) {
+ this.taskGroupDesc = taskGroupDesc.substring(0, maxJobLength);
} else {
this.taskGroupDesc = taskGroupDesc;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/IndexJob.java b/core/src/main/java/org/apache/carbondata/core/index/IndexJob.java
index a89a661..608f989 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/IndexJob.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/IndexJob.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
/**
* Distributable index job to execute the #IndexInputFormat in cluster. it prunes the
- * indexes distributably and returns the final blocklet list
+ * indexes in a distributed manner and returns the final blocklet list
*/
public interface IndexJob extends Serializable {
diff --git a/core/src/main/java/org/apache/carbondata/core/index/IndexStoreManager.java b/core/src/main/java/org/apache/carbondata/core/index/IndexStoreManager.java
index f38c33f..2a69e17 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/IndexStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/IndexStoreManager.java
@@ -72,7 +72,7 @@ public final class IndexStoreManager {
private Map<String, List<TableIndex>> allIndexes = new ConcurrentHashMap<>();
/**
- * Contains the table name to the tablepath mapping.
+ * Contains the table name to the table path mapping.
*/
private Map<String, String> tablePathMap = new ConcurrentHashMap<>();
@@ -135,7 +135,7 @@ public final class IndexStoreManager {
tableIndices = allIndexes.get(tableId);
}
}
- // in case of fileformat or sdk, when table is dropped or schema is changed the indexes are
+ // in case of file format or sdk, when table is dropped or schema is changed the indexes are
// not cleared, they need to be cleared by using API, so compare the columns, if not same, clear
// the indexes on that table
if (allIndexes.size() > 0 && !CollectionUtils.isEmpty(allIndexes.get(tableId))
@@ -595,7 +595,7 @@ public final class IndexStoreManager {
}
private boolean hasCGIndex(CarbonTable carbonTable) throws IOException {
- // In case of spark file format flow, carbontable will be null
+ // In case of spark file format flow, carbon table will be null
if (null == carbonTable) {
return false;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/TableIndex.java b/core/src/main/java/org/apache/carbondata/core/index/TableIndex.java
index 7aa5645..0da8f79 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/TableIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/TableIndex.java
@@ -53,7 +53,7 @@ import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.IndexSchema;
import org.apache.carbondata.core.scan.expression.Expression;
import org.apache.carbondata.core.scan.filter.FilterUtil;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.events.Event;
@@ -114,14 +114,14 @@ public final class TableIndex extends OperationEventListener {
/**
* Pass the valid segments and prune the index using filter expression
*
- * @param allsegments
+ * @param allSegments
* @param filter
* @return
*/
- public List<ExtendedBlocklet> prune(List<Segment> allsegments, final IndexFilter filter,
+ public List<ExtendedBlocklet> prune(List<Segment> allSegments, final IndexFilter filter,
final List<PartitionSpec> partitions) throws IOException {
final List<ExtendedBlocklet> blocklets = new ArrayList<>();
- List<Segment> segments = getCarbonSegments(allsegments);
+ List<Segment> segments = getCarbonSegments(allSegments);
final Map<Segment, List<Index>> indexes;
boolean isFilterPresent = filter != null && !filter.isEmpty();
Set<Path> partitionLocations = getPartitionLocations(partitions);
@@ -170,9 +170,9 @@ public final class TableIndex extends OperationEventListener {
return extendedBlocklets;
}
- private List<Segment> getCarbonSegments(List<Segment> allsegments) {
+ private List<Segment> getCarbonSegments(List<Segment> allSegments) {
List<Segment> segments = new ArrayList<>();
- for (Segment segment : allsegments) {
+ for (Segment segment : allSegments) {
if (segment.isCarbonSegment()) {
segments.add(segment);
}
@@ -215,46 +215,45 @@ public final class TableIndex extends OperationEventListener {
SegmentProperties segmentProperties =
segmentPropertiesFetcher.getSegmentProperties(segment, partitionLocations);
if (filter.isResolvedOnSegment(segmentProperties)) {
- FilterExecuter filterExecuter;
+ FilterExecutor filterExecutor;
if (!isExternalSegment) {
- filterExecuter = FilterUtil
- .getFilterExecuterTree(filter.getResolver(), segmentProperties, null,
+ filterExecutor = FilterUtil
+ .getFilterExecutorTree(filter.getResolver(), segmentProperties, null,
table.getMinMaxCacheColumns(segmentProperties), false);
} else {
- filterExecuter = FilterUtil
- .getFilterExecuterTree(filter.getExternalSegmentResolver(), segmentProperties, null,
+ filterExecutor = FilterUtil
+ .getFilterExecutorTree(filter.getExternalSegmentResolver(), segmentProperties, null,
table.getMinMaxCacheColumns(segmentProperties), false);
}
for (Index index : indexes.get(segment)) {
if (!isExternalSegment) {
pruneBlocklets.addAll(index
- .prune(filter.getResolver(), segmentProperties, filterExecuter, this.table));
+ .prune(filter.getResolver(), segmentProperties, filterExecutor, this.table));
} else {
pruneBlocklets.addAll(index
- .prune(filter.getExternalSegmentResolver(), segmentProperties, filterExecuter,
+ .prune(filter.getExternalSegmentResolver(), segmentProperties, filterExecutor,
this.table));
}
}
} else {
- FilterExecuter filterExecuter;
+ FilterExecutor filterExecutor;
Expression expression = filter.getExpression();
if (!isExternalSegment) {
- filterExecuter = FilterUtil.getFilterExecuterTree(
+ filterExecutor = FilterUtil.getFilterExecutorTree(
new IndexFilter(segmentProperties, table, expression).getResolver(),
segmentProperties, null, table.getMinMaxCacheColumns(segmentProperties), false);
} else {
- filterExecuter = FilterUtil.getFilterExecuterTree(
+ filterExecutor = FilterUtil.getFilterExecutorTree(
new IndexFilter(segmentProperties, table, expression).getExternalSegmentResolver(),
segmentProperties, null, table.getMinMaxCacheColumns(segmentProperties), false);
}
for (Index index : indexes.get(segment)) {
if (!isExternalSegment) {
- pruneBlocklets.addAll(index
- .prune(filter.getExpression(), segmentProperties, table, filterExecuter));
+ pruneBlocklets.addAll(index.prune(
+ filter.getExpression(), segmentProperties, table, filterExecutor));
} else {
- pruneBlocklets.addAll(index
- .prune(filter.getExternalSegmentFilter(), segmentProperties, table,
- filterExecuter));
+ pruneBlocklets.addAll(index.prune(
+ filter.getExternalSegmentFilter(), segmentProperties, table, filterExecutor));
}
}
}
@@ -324,8 +323,8 @@ public final class TableIndex extends OperationEventListener {
}
}
if (prev == 0 || prev != eachSegmentIndexList.size()) {
- // if prev == 0. Add a segment's all indexess
- // eachSegmentIndexList.size() != prev, adding the last remaining indexess of this segment
+ // if prev == 0, add all indexes of the segment
+ // eachSegmentIndexList.size() != prev, adding the last remaining indexes of this segment
segmentIndexGroupList
.add(new SegmentIndexGroup(segment, prev, eachSegmentIndexList.size() - 1));
}
@@ -338,9 +337,9 @@ public final class TableIndex extends OperationEventListener {
throw new RuntimeException(" not all the files processed ");
}
if (indexListForEachThread.size() < numOfThreadsForPruning) {
- // If the total indexess fitted in lesser number of threads than numOfThreadsForPruning.
- // Launch only that many threads where indexess are fitted while grouping.
- LOG.info("indexess is distributed in " + indexListForEachThread.size() + " threads");
+ // If the total indexes fitted in lesser number of threads than numOfThreadsForPruning.
+ // Launch only that many threads where indexes are fitted while grouping.
+ LOG.info("indexes is distributed in " + indexListForEachThread.size() + " threads");
numOfThreadsForPruning = indexListForEachThread.size();
}
LOG.info(
@@ -365,14 +364,14 @@ public final class TableIndex extends OperationEventListener {
Segment segment = segmentIndexGroup.getSegment();
boolean isExternalSegment = segment.getSegmentPath() != null;
if (filter.isResolvedOnSegment(segmentProperties)) {
- FilterExecuter filterExecuter;
+ FilterExecutor filterExecutor;
if (!isExternalSegment) {
- filterExecuter = FilterUtil
- .getFilterExecuterTree(filter.getResolver(), segmentProperties, null,
+ filterExecutor = FilterUtil
+ .getFilterExecutorTree(filter.getResolver(), segmentProperties, null,
table.getMinMaxCacheColumns(segmentProperties), false);
} else {
- filterExecuter = FilterUtil
- .getFilterExecuterTree(filter.getExternalSegmentResolver(), segmentProperties,
+ filterExecutor = FilterUtil
+ .getFilterExecutorTree(filter.getExternalSegmentResolver(), segmentProperties,
null, table.getMinMaxCacheColumns(segmentProperties), false);
}
for (int i = segmentIndexGroup.getFromIndex();
@@ -380,10 +379,10 @@ public final class TableIndex extends OperationEventListener {
List<Blocklet> dmPruneBlocklets;
if (!isExternalSegment) {
dmPruneBlocklets = indexList.get(i)
- .prune(filter.getResolver(), segmentProperties, filterExecuter, table);
+ .prune(filter.getResolver(), segmentProperties, filterExecutor, table);
} else {
dmPruneBlocklets = indexList.get(i)
- .prune(filter.getExternalSegmentResolver(), segmentProperties, filterExecuter,
+ .prune(filter.getExternalSegmentResolver(), segmentProperties, filterExecutor,
table);
}
pruneBlocklets.addAll(addSegmentId(
@@ -392,13 +391,13 @@ public final class TableIndex extends OperationEventListener {
}
} else {
Expression filterExpression = filter.getNewCopyOfExpression();
- FilterExecuter filterExecuter;
+ FilterExecutor filterExecutor;
if (!isExternalSegment) {
- filterExecuter = FilterUtil.getFilterExecuterTree(
+ filterExecutor = FilterUtil.getFilterExecutorTree(
new IndexFilter(segmentProperties, table, filterExpression).getResolver(),
segmentProperties, null, table.getMinMaxCacheColumns(segmentProperties), false);
} else {
- filterExecuter = FilterUtil.getFilterExecuterTree(
+ filterExecutor = FilterUtil.getFilterExecutorTree(
new IndexFilter(segmentProperties, table, filterExpression)
.getExternalSegmentResolver(), segmentProperties, null,
table.getMinMaxCacheColumns(segmentProperties), false);
@@ -408,11 +407,11 @@ public final class TableIndex extends OperationEventListener {
List<Blocklet> dmPruneBlocklets;
if (!isExternalSegment) {
dmPruneBlocklets = indexList.get(i)
- .prune(filterExpression, segmentProperties, table, filterExecuter);
+ .prune(filterExpression, segmentProperties, table, filterExecutor);
} else {
dmPruneBlocklets = indexList.get(i)
.prune(filter.getExternalSegmentFilter(), segmentProperties, table,
- filterExecuter);
+ filterExecutor);
}
pruneBlocklets.addAll(addSegmentId(
blockletDetailsFetcher.getExtendedBlocklets(dmPruneBlocklets, segment),
@@ -463,18 +462,18 @@ public final class TableIndex extends OperationEventListener {
/**
* This is used for making the index distributable.
- * It takes the valid segments and returns all the indexess as distributable objects so that
+ * It takes the valid segments and returns all the indexes as distributable objects so that
* it can be distributed across machines.
*
* @return
*/
- public List<IndexInputSplit> toDistributable(List<Segment> allsegments) {
- List<IndexInputSplit> distributables = new ArrayList<>();
- List<Segment> segments = getCarbonSegments(allsegments);
+ public List<IndexInputSplit> toDistributable(List<Segment> allSegments) {
+ List<IndexInputSplit> distributableList = new ArrayList<>();
+ List<Segment> segments = getCarbonSegments(allSegments);
for (Segment segment : segments) {
- distributables.addAll(indexFactory.toDistributable(segment));
+ distributableList.addAll(indexFactory.toDistributable(segment));
}
- return distributables;
+ return distributableList;
}
public IndexInputSplitWrapper toDistributableSegment(Segment segment, String uniqueId)
@@ -483,7 +482,7 @@ public final class TableIndex extends OperationEventListener {
}
/**
- * This method returns all the indexess corresponding to the distributable object
+ * This method returns all the indexes corresponding to the distributable object
*
* @param distributable
* @return
@@ -508,12 +507,12 @@ public final class TableIndex extends OperationEventListener {
Set<Path> partitionsToPrune = getPartitionLocations(partitions);
SegmentProperties segmentProperties = segmentPropertiesFetcher
.getSegmentProperties(distributable.getSegment(), partitionsToPrune);
- FilterExecuter filterExecuter = FilterUtil
- .getFilterExecuterTree(filterExp, segmentProperties,
+ FilterExecutor filterExecutor = FilterUtil
+ .getFilterExecutorTree(filterExp, segmentProperties,
null, table.getMinMaxCacheColumns(segmentProperties),
false);
for (Index index : indices) {
- blocklets.addAll(index.prune(filterExp, segmentProperties, filterExecuter, table));
+ blocklets.addAll(index.prune(filterExp, segmentProperties, filterExecutor, table));
}
BlockletSerializer serializer = new BlockletSerializer();
String writePath =
@@ -526,10 +525,10 @@ public final class TableIndex extends OperationEventListener {
ExtendedBlocklet detailedBlocklet = blockletDetailsFetcher
.getExtendedBlocklet(blocklet, distributable.getSegment());
if (indexFactory.getIndexLevel() == IndexLevel.FG) {
- String blockletwritePath =
+ String blockletWritePath =
writePath + CarbonCommonConstants.FILE_SEPARATOR + System.nanoTime();
- detailedBlocklet.setIndexWriterPath(blockletwritePath);
- serializer.serializeBlocklet((FineGrainBlocklet) blocklet, blockletwritePath);
+ detailedBlocklet.setIndexWriterPath(blockletWritePath);
+ serializer.serializeBlocklet((FineGrainBlocklet) blocklet, blockletWritePath);
}
detailedBlocklet.setSegment(distributable.getSegment());
detailedBlocklets.add(detailedBlocklet);
@@ -538,7 +537,7 @@ public final class TableIndex extends OperationEventListener {
}
/**
- * Clear only the indexess of the segments
+ * Clear only the indexes of the segments
* @param segmentIds list of segmentIds to be cleared from cache.
*/
public void clear(List<String> segmentIds) {
@@ -559,8 +558,8 @@ public final class TableIndex extends OperationEventListener {
/**
* delete only the index of the segments
*/
- public void deleteIndexData(List<Segment> allsegments) throws IOException {
- List<Segment> segments = getCarbonSegments(allsegments);
+ public void deleteIndexData(List<Segment> allSegments) throws IOException {
+ List<Segment> segments = getCarbonSegments(allSegments);
for (Segment segment: segments) {
indexFactory.deleteIndexData(segment);
}
@@ -596,15 +595,15 @@ public final class TableIndex extends OperationEventListener {
/**
* Prune the index of the given segments and return the Map of blocklet path and row count
*
- * @param allsegments
+ * @param allSegments
* @param partitions
* @return
* @throws IOException
*/
- public Map<String, Long> getBlockRowCount(List<Segment> allsegments,
+ public Map<String, Long> getBlockRowCount(List<Segment> allSegments,
final List<PartitionSpec> partitions, TableIndex defaultIndex)
throws IOException {
- List<Segment> segments = getCarbonSegments(allsegments);
+ List<Segment> segments = getCarbonSegments(allSegments);
Map<String, Long> blockletToRowCountMap = new HashMap<>();
for (Segment segment : segments) {
List<CoarseGrainIndex> indexes = defaultIndex.getIndexFactory().getIndexes(segment);
@@ -624,11 +623,11 @@ public final class TableIndex extends OperationEventListener {
/**
* Get the mapping of blocklet path and row count for all blocks. This method skips the
- * validation of partition info for countStar job with indexserver enabled.
+ * validation of partition info for countStar job with index server enabled.
*/
- public Map<String, Long> getBlockRowCount(TableIndex defaultIndex, List<Segment> allsegments,
+ public Map<String, Long> getBlockRowCount(TableIndex defaultIndex, List<Segment> allSegments,
final List<PartitionSpec> partitions) throws IOException {
- List<Segment> segments = getCarbonSegments(allsegments);
+ List<Segment> segments = getCarbonSegments(allSegments);
Map<String, Long> blockletToRowCountMap = new HashMap<>();
for (Segment segment : segments) {
List<CoarseGrainIndex> indexes = defaultIndex.getIndexFactory().getIndexes(segment);
@@ -642,14 +641,14 @@ public final class TableIndex extends OperationEventListener {
/**
* Prune the index of the given segments and return the Map of blocklet path and row count
*
- * @param allsegments
+ * @param allSegments
* @param partitions
* @return
* @throws IOException
*/
- public long getRowCount(List<Segment> allsegments, final List<PartitionSpec> partitions,
+ public long getRowCount(List<Segment> allSegments, final List<PartitionSpec> partitions,
TableIndex defaultIndex) throws IOException {
- List<Segment> segments = getCarbonSegments(allsegments);
+ List<Segment> segments = getCarbonSegments(allSegments);
long totalRowCount = 0L;
for (Segment segment : segments) {
List<CoarseGrainIndex> indexes = defaultIndex.getIndexFactory().getIndexes(segment);
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/CacheableIndex.java b/core/src/main/java/org/apache/carbondata/core/index/dev/CacheableIndex.java
index 435dc2a..903bef2 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/CacheableIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/CacheableIndex.java
@@ -44,15 +44,15 @@ public interface CacheableIndex {
BlockletIndexWrapper blockletIndexWrapper) throws IOException;
/**
- * Get all the uncached distributables from the list.
+ * Get all the uncached distributable splits from the list.
*
- * @param distributables
+ * @param distributableList
* @return
*/
- List<IndexInputSplit> getAllUncachedDistributables(List<IndexInputSplit> distributables)
+ List<IndexInputSplit> getAllUncached(List<IndexInputSplit> distributableList)
throws IOException;
- List<IndexInputSplit> getAllUncachedDistributables(
+ List<IndexInputSplit> getAllUncached(
List<Segment> segments, IndexExprWrapper indexExprWrapper) throws IOException;
void updateSegmentIndex(
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/Index.java b/core/src/main/java/org/apache/carbondata/core/index/dev/Index.java
index 0270ab1..fb5a792 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/Index.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/Index.java
@@ -28,7 +28,7 @@ import org.apache.carbondata.core.indexstore.Blocklet;
import org.apache.carbondata.core.indexstore.PartitionSpec;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.scan.expression.Expression;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
/**
@@ -47,24 +47,24 @@ public interface Index<T extends Blocklet> {
* It returns the list of blocklets where these filters can exist.
*/
List<T> prune(FilterResolverIntf filterExp, SegmentProperties segmentProperties,
- FilterExecuter filterExecuter, CarbonTable table) throws IOException;
+ FilterExecutor filterExecutor, CarbonTable table) throws IOException;
/**
* Prune the table with filter expression. It returns the list of
* blocklets where these filters can exist.
*/
List<T> prune(Expression filter, SegmentProperties segmentProperties,
- CarbonTable carbonTable, FilterExecuter filterExecuter);
+ CarbonTable carbonTable, FilterExecutor filterExecutor);
/**
* Prune the indexes for finding the row count. It returns a Map of
- * blockletpath and the row count
+ * blocklet path and the row count
*/
long getRowCount(Segment segment, List<PartitionSpec> partitions);
/**
* Prune the indexes for finding the row count for each block. It returns a Map of
- * blockletpath and the row count
+ * blocklet path and the row count
*/
Map<String, Long> getRowCountForEachBlock(Segment segment, List<PartitionSpec> partitions,
Map<String, Long> blockletToRowCountMap);
@@ -93,7 +93,7 @@ public interface Index<T extends Blocklet> {
/**
* Returns number of records information that are stored in Index.
* Driver multi-thread block pruning happens based on the number of rows in Index.
- * So Indexs can have multiple rows if they store information of multiple files.
+ * So Indexes can have multiple rows if they store information of multiple files.
* so, this number of entries is used to represent how many files information a Index contains
*/
int getNumberOfEntries();
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/IndexFactory.java b/core/src/main/java/org/apache/carbondata/core/index/dev/IndexFactory.java
index a014102..6299a73 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/IndexFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/IndexFactory.java
@@ -181,7 +181,7 @@ public abstract class IndexFactory<T extends Index> {
/**
* whether to block operation on corresponding table or column.
- * For example, bloomfilter index will block changing datatype for bloomindex column.
+ * For example, bloom filter index will block changing datatype for bloom index column.
* By default it will not block any operation.
*
* @param operation table operation
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/cgindex/CoarseGrainIndex.java b/core/src/main/java/org/apache/carbondata/core/index/dev/cgindex/CoarseGrainIndex.java
index 3fb2d99..8072d8b 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/cgindex/CoarseGrainIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/cgindex/CoarseGrainIndex.java
@@ -29,7 +29,7 @@ import org.apache.carbondata.core.indexstore.Blocklet;
import org.apache.carbondata.core.indexstore.PartitionSpec;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.scan.expression.Expression;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
/**
* Index for Coarse Grain level, see {@link org.apache.carbondata.core.index.IndexLevel#CG}
@@ -40,7 +40,7 @@ public abstract class CoarseGrainIndex implements Index<Blocklet> {
@Override
public List<Blocklet> prune(Expression expression, SegmentProperties segmentProperties,
- CarbonTable carbonTable, FilterExecuter filterExecuter) {
+ CarbonTable carbonTable, FilterExecutor filterExecutor) {
throw new UnsupportedOperationException("Filter expression not supported");
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/cgindex/CoarseGrainIndexFactory.java b/core/src/main/java/org/apache/carbondata/core/index/dev/cgindex/CoarseGrainIndexFactory.java
index 57f3d73..baf15c2 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/cgindex/CoarseGrainIndexFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/cgindex/CoarseGrainIndexFactory.java
@@ -27,9 +27,9 @@ import org.apache.carbondata.core.metadata.schema.table.IndexSchema;
/**
* Factory for {@link CoarseGrainIndex}
* 1. Any filter query which hits the table with index will call prune method of CGindex.
- * 2. The prune method of CGindex return list Blocklet , these blocklets contain the
+ * 2. The prune method of CGIndex return list Blocklet , these blocklets contain the
* information of block and blocklet.
- * 3. Based on the splits scanrdd schedule the tasks.
+ * 3. Based on the splits scanRdd schedule the tasks.
*/
@InterfaceAudience.Developer("Index")
@InterfaceStability.Evolving
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/AndIndexExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/AndIndexExprWrapper.java
index 0dbdbd9..a29734a 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/AndIndexExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/AndIndexExprWrapper.java
@@ -126,7 +126,7 @@ public class AndIndexExprWrapper extends IndexExprWrapper {
}
@Override
- public IndexExprWrapper getRightIndexWrapprt() {
+ public IndexExprWrapper getRightIndexWrapper() {
return right;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexExprWrapper.java
index 04867dd..02bb353 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexExprWrapper.java
@@ -53,7 +53,7 @@ public abstract class IndexExprWrapper implements Serializable {
List<PartitionSpec> partitionsToPrune) throws IOException;
/**
- * It is used in case on distributable index. First using job it gets all blockets from all
+ * It is used in case on distributable index. First using job it gets all blocklets from all
* related indexes. These blocklets are passed to this method to apply expression.
*
* @param blocklets
@@ -79,7 +79,7 @@ public abstract class IndexExprWrapper implements Serializable {
throws IOException;
/**
- * Each leaf node is identified by uniqueid, so if user wants the underlying filter expression for
+ * Each leaf node is identified by uniqueId, so if user wants the underlying filter expression for
* any leaf node then this method can be used.
* @param uniqueId
* @return
@@ -99,7 +99,7 @@ public abstract class IndexExprWrapper implements Serializable {
/**
* get the right index wrapper
*/
- public abstract IndexExprWrapper getRightIndexWrapprt();
+ public abstract IndexExprWrapper getRightIndexWrapper();
/**
* Convert segment to distributable object.
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexExprWrapperImpl.java b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexExprWrapperImpl.java
index 16cc13b..62c79e6 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexExprWrapperImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexExprWrapperImpl.java
@@ -111,7 +111,7 @@ public class IndexExprWrapperImpl extends IndexExprWrapper {
}
@Override
- public IndexExprWrapper getRightIndexWrapprt() {
+ public IndexExprWrapper getRightIndexWrapper() {
return null;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexWrapperSimpleInfo.java b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexWrapperSimpleInfo.java
index 689028e..cbb923d 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexWrapperSimpleInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/IndexWrapperSimpleInfo.java
@@ -56,11 +56,11 @@ public class IndexWrapperSimpleInfo {
} else if (indexExprWrapper instanceof AndIndexExprWrapper) {
return new IndexWrapperSimpleInfo(WrapperType.AND,
fromIndexWrapper(indexExprWrapper.getLeftIndexWrapper()),
- fromIndexWrapper(indexExprWrapper.getRightIndexWrapprt()));
+ fromIndexWrapper(indexExprWrapper.getRightIndexWrapper()));
} else {
return new IndexWrapperSimpleInfo(WrapperType.OR,
fromIndexWrapper(indexExprWrapper.getLeftIndexWrapper()),
- fromIndexWrapper(indexExprWrapper.getRightIndexWrapprt()));
+ fromIndexWrapper(indexExprWrapper.getRightIndexWrapper()));
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/OrIndexExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/OrIndexExprWrapper.java
index f419513..ec714de 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/expr/OrIndexExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/expr/OrIndexExprWrapper.java
@@ -119,7 +119,7 @@ public class OrIndexExprWrapper extends IndexExprWrapper {
}
@Override
- public IndexExprWrapper getRightIndexWrapprt() {
+ public IndexExprWrapper getRightIndexWrapper() {
return right;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/fgindex/FineGrainIndex.java b/core/src/main/java/org/apache/carbondata/core/index/dev/fgindex/FineGrainIndex.java
index a0b5cea..f98122e 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/fgindex/FineGrainIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/fgindex/FineGrainIndex.java
@@ -28,7 +28,7 @@ import org.apache.carbondata.core.index.dev.Index;
import org.apache.carbondata.core.indexstore.PartitionSpec;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.scan.expression.Expression;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
/**
* Index for Fine Grain level, see {@link org.apache.carbondata.core.index.IndexLevel#FG}
@@ -39,7 +39,7 @@ public abstract class FineGrainIndex implements Index<FineGrainBlocklet> {
@Override
public List<FineGrainBlocklet> prune(Expression filter, SegmentProperties segmentProperties,
- CarbonTable carbonTable, FilterExecuter filterExecuter) {
+ CarbonTable carbonTable, FilterExecutor filterExecutor) {
throw new UnsupportedOperationException("Filter expression not supported");
}
diff --git a/core/src/main/java/org/apache/carbondata/core/index/dev/fgindex/FineGrainIndexFactory.java b/core/src/main/java/org/apache/carbondata/core/index/dev/fgindex/FineGrainIndexFactory.java
index e7fd935..2b0674d 100644
--- a/core/src/main/java/org/apache/carbondata/core/index/dev/fgindex/FineGrainIndexFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/index/dev/fgindex/FineGrainIndexFactory.java
@@ -27,13 +27,13 @@ import org.apache.carbondata.core.metadata.schema.table.IndexSchema;
/**
* Factory for {@link FineGrainIndex}
*
- * 1. Any filter query which hits the table with index will call prune method of FGindex.
- * 2. The prune method of FGindex return list FineGrainBlocklet , these blocklets contain the
- * information of block, blocklet, page and rowids information as well.
- * 3. The pruned blocklets are internally wriitten to file and returns only the block ,
+ * 1. Any filter query which hits the table with index will call prune method of FGIndex.
+ * 2. The prune method of FGIndex return list FineGrainBlocklet , these blocklets contain the
+ * information of block, blocklet, page and rowIds information as well.
+ * 3. The pruned blocklets are internally written to file and returns only the block ,
* blocklet and filepath information as part of Splits.
- * 4. Based on the splits scanrdd schedule the tasks.
- * 5. In filterscanner we check the indexwriterpath from split and reNoteads the
+ * 4. Based on the splits scan rdd schedule the tasks.
+ * 5. In filter scanner we check the index writer path from split and reads the
* bitset if exists. And pass this bitset as input to it.
*/
@InterfaceAudience.Developer("Index")
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailsFetcher.java b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailsFetcher.java
index 03c137e..28d9769 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailsFetcher.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDetailsFetcher.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.fs.Path;
public interface BlockletDetailsFetcher {
/**
- * Get the blocklet detail information based on blockletid, blockid and segmentId.
+ * Get the blocklet detail information based on blockletId, blockId and segmentId.
*
* @param blocklets
* @param segment
@@ -42,7 +42,7 @@ public interface BlockletDetailsFetcher {
throws IOException;
/**
- * Get the blocklet detail information based on blockletid, blockid and segmentId.
+ * Get the blocklet detail information based on blockletId, blockId and segmentId.
*
* @param blocklet
* @param segment
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletIndexStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletIndexStore.java
index e9231bf..7e17673 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletIndexStore.java
@@ -174,7 +174,7 @@ public class BlockletIndexStore
new ArrayList<>(tableSegmentUniqueIdentifiers.size());
List<TableBlockIndexUniqueIdentifierWrapper> missedIdentifiersWrapper = new ArrayList<>();
BlockletIndexWrapper blockletIndexWrapper = null;
- // Get the indexes for each indexfile from cache.
+ // Get the indexes for each index file from cache.
try {
for (TableBlockIndexUniqueIdentifierWrapper
identifierWrapper : tableSegmentUniqueIdentifiers) {
@@ -278,7 +278,7 @@ public class BlockletIndexStore
/**
* Below method will be used to load the segment of segments
* One segment may have multiple task , so table segment will be loaded
- * based on task id and will return the map of taksId to table segment
+ * based on task id and will return the map of taskId to table segment
* map
*
* @return map of taks id to segment mapping
@@ -317,12 +317,12 @@ public class BlockletIndexStore
private synchronized Object addAndGetSegmentLock(String uniqueIdentifier) {
// get the segment lock object if it is present then return
// otherwise add the new lock and return
- Object segmentLoderLockObject = segmentLockMap.get(uniqueIdentifier);
- if (null == segmentLoderLockObject) {
- segmentLoderLockObject = new Object();
- segmentLockMap.put(uniqueIdentifier, segmentLoderLockObject);
+ Object segmentLockObject = segmentLockMap.get(uniqueIdentifier);
+ if (null == segmentLockObject) {
+ segmentLockObject = new Object();
+ segmentLockMap.put(uniqueIdentifier, segmentLockObject);
}
- return segmentLoderLockObject;
+ return segmentLockObject;
}
/**
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlocklet.java b/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlocklet.java
index 37aa60d..edb921b 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlocklet.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlocklet.java
@@ -158,9 +158,9 @@ public class ExtendedBlocklet extends Blocklet {
}
/**
- * Method to seralize extended blocklet and inputsplit for index server
+ * Method to serialize extended blocklet and input split for index server
* DataFormat
- * <Extended Blocklet data><Carbon input split serializeData lenght><CarbonInputSplitData>
+ * <Extended Blocklet data><Carbon input split serializeData length><CarbonInputSplitData>
* @param out
* @param uniqueLocation
* @throws IOException
@@ -189,7 +189,7 @@ public class ExtendedBlocklet extends Blocklet {
inputSplit.setFilePath(null);
inputSplit.setBucketId(null);
if (inputSplit.isBlockCache()) {
- inputSplit.updateFooteroffset();
+ inputSplit.updateFooterOffset();
inputSplit.updateBlockLength();
inputSplit.setWriteDetailInfo(false);
}
@@ -201,7 +201,7 @@ public class ExtendedBlocklet extends Blocklet {
}
/**
- * Method to deseralize extended blocklet and inputsplit for index server
+ * Method to deserialize extended blocklet and input split for index server
* @param in
* @param locations
* @param tablePath
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlockletWrapper.java b/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlockletWrapper.java
index b9fb4b5..67d2eb8 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlockletWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlockletWrapper.java
@@ -84,7 +84,7 @@ public class ExtendedBlockletWrapper implements Writable, Serializable {
final CarbonFile carbonFile = FileFactory.getCarbonFile(folderPath);
boolean isFolderExists = true;
if (!carbonFile.isFileExist()) {
- LOGGER.warn("Folder:" + folderPath + "doesn't exists, data will be send through netwrok");
+ LOGGER.warn("Folder:" + folderPath + "doesn't exists, data will be send through network");
isFolderExists = false;
}
if (isFolderExists) {
@@ -164,7 +164,7 @@ public class ExtendedBlockletWrapper implements Writable, Serializable {
}
/**
- * deseralize the blocklet data from file or stream
+ * deserialize the blocklet data from file or stream
* data format
* <number of splits><number of unique location[short]><locations><serialize data len><data>
*
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlockletWrapperContainer.java b/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlockletWrapperContainer.java
index 19b0039..9816181 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlockletWrapperContainer.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/ExtendedBlockletWrapperContainer.java
@@ -62,7 +62,7 @@ public class ExtendedBlockletWrapperContainer implements Writable {
this.isFallbackJob = isFallbackJob;
}
- public List<ExtendedBlocklet> getExtendedBlockets(String tablePath, String queryId,
+ public List<ExtendedBlocklet> getExtendedBlocklets(String tablePath, String queryId,
boolean isCountJob) throws IOException {
if (!isFallbackJob) {
int numOfThreads = CarbonProperties.getNumOfThreadsForPruning();
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/TableBlockIndexUniqueIdentifier.java b/core/src/main/java/org/apache/carbondata/core/indexstore/TableBlockIndexUniqueIdentifier.java
index 0b2b3e3..7d422e7 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/TableBlockIndexUniqueIdentifier.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/TableBlockIndexUniqueIdentifier.java
@@ -23,7 +23,7 @@ import java.util.Objects;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
/**
- * Class holds the indexFile information to uniquely identitify the carbon index
+ * Class holds the indexFile information to uniquely identify the carbon index
*/
public class TableBlockIndexUniqueIdentifier implements Serializable {
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java
index 4fd0ebe..5e0f579 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/UnsafeMemoryDMStore.java
@@ -97,7 +97,7 @@ public class UnsafeMemoryDMStore extends AbstractMemoryDMStore {
* LO: Last Offset
*
* Read:
- * FD: Read directly based of byte postion added in CarbonRowSchema
+ * FD: Read directly based of byte position added in CarbonRowSchema
*
* VD: Read based on below logic
* if not last variable column schema
@@ -144,7 +144,7 @@ public class UnsafeMemoryDMStore extends AbstractMemoryDMStore {
bytePosition += CarbonCommonConstants.INT_SIZE_IN_BYTE;
// start byte position of variable length data
int varColPosition = bytePosition + CarbonCommonConstants.INT_SIZE_IN_BYTE;
- // current position refers to current byte postion in memory block
+ // current position refers to current byte position in memory block
int currentPosition;
for (int i = 0; i < schema.length; i++) {
switch (schema[i].getSchemaType()) {
@@ -167,11 +167,11 @@ public class UnsafeMemoryDMStore extends AbstractMemoryDMStore {
break;
}
}
- // writting the last offset
+ // writing the last offset
getUnsafe()
.putInt(memoryBlock.getBaseObject(), memoryBlock.getBaseOffset() + pointer + bytePosition,
varColPosition);
- // after adding last offset increament the length by 4 bytes as last postion
+ // after adding last offset increment the length by 4 bytes as last position
// written as INT
runningLength += CarbonCommonConstants.INT_SIZE_IN_BYTE;
pointers[rowCount++] = pointer;
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockIndex.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockIndex.java
index 0ad7940..6126636 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockIndex.java
@@ -58,7 +58,7 @@ import org.apache.carbondata.core.profiler.ExplainCollector;
import org.apache.carbondata.core.scan.expression.Expression;
import org.apache.carbondata.core.scan.filter.FilterExpressionProcessor;
import org.apache.carbondata.core.scan.filter.FilterUtil;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.executer.ImplicitColumnFilterExecutor;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.util.BlockletIndexUtil;
@@ -133,7 +133,7 @@ public class BlockIndex extends CoarseGrainIndex
this.isPartitionTable = blockletIndexModel.getCarbonTable().isHivePartitionTable();
if (this.isPartitionTable || !blockletIndexModel.getCarbonTable().isTransactionalTable() ||
blockletIndexModel.getCarbonTable().isSupportFlatFolder() ||
- // if the segment data is written in tablepath then no need to store whole path of file.
+ // if the segment data is written in table path then no need to store whole path of file.
!blockletIndexModel.getFilePath().startsWith(
blockletIndexModel.getCarbonTable().getTablePath())) {
filePath = FilenameUtils.getFullPathNoEndSeparator(path)
@@ -444,7 +444,7 @@ public class BlockIndex extends CoarseGrainIndex
}
// create the segment directory path
String tablePath = segmentPropertiesWrapper.getTableIdentifier().getTablePath();
- String segmentId = getTableTaskInfo(SUMMARY_SEGMENTID);
+ String segmentId = getTableTaskInfo(SUMMARY_SEGMENT_ID);
return CarbonTablePath.getSegmentPath(tablePath, segmentId);
}
@@ -460,7 +460,7 @@ public class BlockIndex extends CoarseGrainIndex
// write the task summary info to unsafe memory store
if (null != summaryRow) {
summaryRow.setByteArray(fileName, SUMMARY_INDEX_FILE_NAME);
- summaryRow.setByteArray(segmentId, SUMMARY_SEGMENTID);
+ summaryRow.setByteArray(segmentId, SUMMARY_SEGMENT_ID);
if (filePath.length > 0) {
summaryRow.setByteArray(filePath, SUMMARY_INDEX_PATH);
}
@@ -560,12 +560,12 @@ public class BlockIndex extends CoarseGrainIndex
@Override
public boolean isScanRequired(FilterResolverIntf filterExp) {
- FilterExecuter filterExecuter = FilterUtil.getFilterExecuterTree(
+ FilterExecutor filterExecutor = FilterUtil.getFilterExecutorTree(
filterExp, getSegmentProperties(), null, getMinMaxCacheColumns(), false);
IndexRow unsafeRow = taskSummaryDMStore
.getIndexRow(getTaskSummarySchema(), taskSummaryDMStore.getRowCount() - 1);
boolean isScanRequired = FilterExpressionProcessor
- .isScanRequired(filterExecuter, getMinMaxValue(unsafeRow, TASK_MAX_VALUES_INDEX),
+ .isScanRequired(filterExecutor, getMinMaxValue(unsafeRow, TASK_MAX_VALUES_INDEX),
getMinMaxValue(unsafeRow, TASK_MIN_VALUES_INDEX),
getMinMaxFlag(unsafeRow, TASK_MIN_MAX_FLAG));
if (isScanRequired) {
@@ -584,8 +584,8 @@ public class BlockIndex extends CoarseGrainIndex
*/
protected short getBlockletNumOfEntry(int index) {
final byte[] bytes = getBlockletRowCountForEachBlock();
- // if the segment data is written in tablepath
- // then the reuslt of getBlockletRowCountForEachBlock will be empty.
+ // if the segment data is written in table path
+ // then the result of getBlockletRowCountForEachBlock will be empty.
if (bytes.length == 0) {
return 0;
} else {
@@ -657,7 +657,7 @@ public class BlockIndex extends CoarseGrainIndex
return blockletToRowCountMap;
}
- private List<Blocklet> prune(FilterResolverIntf filterExp, FilterExecuter filterExecuter,
+ private List<Blocklet> prune(FilterResolverIntf filterExp, FilterExecutor filterExecutor,
SegmentProperties segmentProperties) {
if (memoryDMStore.getRowCount() == 0) {
return new ArrayList<>();
@@ -685,8 +685,8 @@ public class BlockIndex extends CoarseGrainIndex
// flag to be used for deciding whether use min/max in executor pruning for BlockletIndex
boolean useMinMaxForPruning = useMinMaxForExecutorPruning(filterExp);
if (!validateSegmentProperties(segmentProperties)) {
- filterExecuter = FilterUtil
- .getFilterExecuterTree(filterExp, getSegmentProperties(),
+ filterExecutor = FilterUtil
+ .getFilterExecutorTree(filterExp, getSegmentProperties(),
null, getMinMaxCacheColumns(), false);
}
// min and max for executor pruning
@@ -696,7 +696,7 @@ public class BlockIndex extends CoarseGrainIndex
String fileName = getFileNameWithFilePath(row, filePath);
short blockletId = getBlockletId(row);
boolean isValid =
- addBlockBasedOnMinMaxValue(filterExecuter, getMinMaxValue(row, MAX_VALUES_INDEX),
+ addBlockBasedOnMinMaxValue(filterExecutor, getMinMaxValue(row, MAX_VALUES_INDEX),
getMinMaxValue(row, MIN_VALUES_INDEX), minMaxFlag, fileName, blockletId);
if (isValid) {
blocklets.add(createBlocklet(row, fileName, blockletId, useMinMaxForPruning));
@@ -722,14 +722,14 @@ public class BlockIndex extends CoarseGrainIndex
@Override
public List<Blocklet> prune(Expression expression, SegmentProperties properties,
- CarbonTable carbonTable, FilterExecuter filterExecuter) {
+ CarbonTable carbonTable, FilterExecutor filterExecutor) {
return prune(new IndexFilter(properties, carbonTable, expression).getResolver(), properties,
- filterExecuter, carbonTable);
+ filterExecutor, carbonTable);
}
@Override
public List<Blocklet> prune(FilterResolverIntf filterExp, SegmentProperties segmentProperties,
- FilterExecuter filterExecuter, CarbonTable table) {
+ FilterExecutor filterExecutor, CarbonTable table) {
if (memoryDMStore.getRowCount() == 0) {
return new ArrayList<>();
}
@@ -738,7 +738,7 @@ public class BlockIndex extends CoarseGrainIndex
// segmentProperties.
// Its a temporary fix. The Interface Index.prune(FilterResolverIntf filterExp,
// SegmentProperties segmentProperties, List<PartitionSpec> partitions) should be corrected
- return prune(filterExp, filterExecuter, segmentProperties);
+ return prune(filterExp, filterExecutor, segmentProperties);
}
public boolean validatePartitionInfo(List<PartitionSpec> partitions) {
@@ -781,7 +781,7 @@ public class BlockIndex extends CoarseGrainIndex
/**
* select the blocks based on column min and max value
*
- * @param filterExecuter
+ * @param filterExecutor
* @param maxValue
* @param minValue
* @param minMaxFlag
@@ -789,10 +789,10 @@ public class BlockIndex extends CoarseGrainIndex
* @param blockletId
* @return
*/
- private boolean addBlockBasedOnMinMaxValue(FilterExecuter filterExecuter, byte[][] maxValue,
+ private boolean addBlockBasedOnMinMaxValue(FilterExecutor filterExecutor, byte[][] maxValue,
byte[][] minValue, boolean[] minMaxFlag, String filePath, int blockletId) {
BitSet bitSet = null;
- if (filterExecuter instanceof ImplicitColumnFilterExecutor) {
+ if (filterExecutor instanceof ImplicitColumnFilterExecutor) {
String uniqueBlockPath;
CarbonTable carbonTable = segmentPropertiesWrapper.getCarbonTable();
if (carbonTable.isHivePartitionTable()) {
@@ -810,10 +810,10 @@ public class BlockIndex extends CoarseGrainIndex
if (blockletId != -1) {
uniqueBlockPath = uniqueBlockPath + CarbonCommonConstants.FILE_SEPARATOR + blockletId;
}
- bitSet = ((ImplicitColumnFilterExecutor) filterExecuter)
+ bitSet = ((ImplicitColumnFilterExecutor) filterExecutor)
.isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue, uniqueBlockPath, minMaxFlag);
} else {
- bitSet = filterExecuter.isScanRequired(maxValue, minValue, minMaxFlag);
+ bitSet = filterExecutor.isScanRequired(maxValue, minValue, minMaxFlag);
}
if (!bitSet.isEmpty()) {
return true;
@@ -933,7 +933,7 @@ public class BlockIndex extends CoarseGrainIndex
CarbonCommonConstants.DEFAULT_CHARSET);
fileDetails[1] = new String(unsafeRow.getByteArray(SUMMARY_INDEX_FILE_NAME),
CarbonCommonConstants.DEFAULT_CHARSET);
- fileDetails[2] = new String(unsafeRow.getByteArray(SUMMARY_SEGMENTID),
+ fileDetails[2] = new String(unsafeRow.getByteArray(SUMMARY_SEGMENT_ID),
CarbonCommonConstants.DEFAULT_CHARSET);
return fileDetails;
} catch (Exception e) {
@@ -994,7 +994,7 @@ public class BlockIndex extends CoarseGrainIndex
}
/**
- * This method will ocnvert safe to unsafe memory DM store
+ * This method will convert safe to unsafe memory DM store
*
*/
public void convertToUnsafeDMStore() {
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexFactory.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexFactory.java
index ef665c5..f26f654 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexFactory.java
@@ -63,7 +63,7 @@ import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.IndexSchema;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
import org.apache.carbondata.core.scan.filter.FilterUtil;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.segmentmeta.SegmentColumnMetaDataInfo;
import org.apache.carbondata.core.segmentmeta.SegmentMetaDataInfo;
@@ -220,7 +220,7 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
/**
* Using blockLevel minmax values, identify if segment has to be added for further pruning and to
* load segment index info to cache
- * @param segment to be identified if needed for loading block indexs
+ * @param segment to be identified if needed for loading block indexes
* @param segmentMetaDataInfo list of block level min max values
* @param filter filter expression
* @param identifiers tableBlockIndexUniqueIdentifiers
@@ -290,11 +290,11 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
FilterResolverIntf resolver =
new IndexFilter(segmentProperties, this.getCarbonTable(), filter.getExpression())
.getResolver();
- // prepare filter executer using datmapFilter resolver
- FilterExecuter filterExecuter =
- FilterUtil.getFilterExecuterTree(resolver, segmentProperties, null, null, false);
+ // prepare filter executor using IndexFilter resolver
+ FilterExecutor filterExecutor =
+ FilterUtil.getFilterExecutorTree(resolver, segmentProperties, null, null, false);
// check if block has to be pruned based on segment minmax
- BitSet scanRequired = filterExecuter.isScanRequired(max, min, minMaxFlag);
+ BitSet scanRequired = filterExecutor.isScanRequired(max, min, minMaxFlag);
if (!scanRequired.isEmpty()) {
isScanRequired = true;
}
@@ -374,7 +374,7 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
}
/**
- * Get the blocklet detail information based on blockletid, blockid and segmentId. This method is
+ * Get the blocklet detail information based on blockletId, blockId and segmentId. This method is
* exclusively for BlockletIndexFactory as detail information is only available in this
* default index.
*/
@@ -447,19 +447,19 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
@Override
public List<IndexInputSplit> toDistributable(Segment segment) {
- List<IndexInputSplit> distributables = new ArrayList<>();
+ List<IndexInputSplit> distributableList = new ArrayList<>();
try {
BlockletIndexInputSplit distributable = new BlockletIndexInputSplit();
distributable.setSegment(segment);
distributable.setIndexSchema(INDEX_SCHEMA);
distributable.setSegmentPath(CarbonTablePath.getSegmentPath(identifier.getTablePath(),
segment.getSegmentNo()));
- distributables.add(new IndexInputSplitWrapper(UUID.randomUUID().toString(),
+ distributableList.add(new IndexInputSplitWrapper(UUID.randomUUID().toString(),
distributable).getDistributable());
} catch (Exception e) {
throw new RuntimeException(e);
}
- return distributables;
+ return distributableList;
}
@Override
@@ -680,10 +680,10 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
}
@Override
- public List<IndexInputSplit> getAllUncachedDistributables(
- List<IndexInputSplit> distributables) throws IOException {
- List<IndexInputSplit> distributablesToBeLoaded = new ArrayList<>(distributables.size());
- for (IndexInputSplit distributable : distributables) {
+ public List<IndexInputSplit> getAllUncached(
+ List<IndexInputSplit> distributableList) throws IOException {
+ List<IndexInputSplit> distributableToBeLoaded = new ArrayList<>(distributableList.size());
+ for (IndexInputSplit distributable : distributableList) {
Segment segment = distributable.getSegment();
Set<TableBlockIndexUniqueIdentifier> tableBlockIndexUniqueIdentifiers =
getTableBlockIndexUniqueIdentifiers(segment);
@@ -695,10 +695,10 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
new TableBlockIndexUniqueIdentifierWrapper(validIdentifier, this.getCarbonTable()))) {
((BlockletIndexInputSplit) distributable)
.setTableBlockIndexUniqueIdentifier(validIdentifier);
- distributablesToBeLoaded.add(distributable);
+ distributableToBeLoaded.add(distributable);
}
}
- return distributablesToBeLoaded;
+ return distributableToBeLoaded;
}
private Set<TableBlockIndexUniqueIdentifier> getTableSegmentUniqueIdentifiers(Segment segment)
@@ -719,9 +719,9 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
}
@Override
- public List<IndexInputSplit> getAllUncachedDistributables(List<Segment> validSegments,
+ public List<IndexInputSplit> getAllUncached(List<Segment> validSegments,
IndexExprWrapper indexExprWrapper) throws IOException {
- List<IndexInputSplit> distributablesToBeLoaded = new ArrayList<>();
+ List<IndexInputSplit> distributableToBeLoaded = new ArrayList<>();
for (Segment segment : validSegments) {
IndexInputSplitWrapper indexInputSplitWrappers =
indexExprWrapper.toDistributableSegment(segment);
@@ -733,11 +733,11 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
if (identifier.getIndexFilePath() == null || blockletIndexWrapper == null) {
((BlockletIndexInputSplit) indexInputSplitWrappers.getDistributable())
.setTableBlockIndexUniqueIdentifier(identifier);
- distributablesToBeLoaded.add(indexInputSplitWrappers.getDistributable());
+ distributableToBeLoaded.add(indexInputSplitWrappers.getDistributable());
}
}
}
- return distributablesToBeLoaded;
+ return distributableToBeLoaded;
}
@Override
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexRowIndexes.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexRowIndexes.java
index 421870d..f3b9ce9 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexRowIndexes.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexRowIndexes.java
@@ -33,7 +33,7 @@ public interface BlockletIndexRowIndexes {
int VERSION_INDEX = 4;
- int SCHEMA_UPADATED_TIME_INDEX = 5;
+ int SCHEMA_UPDATED_TIME_INDEX = 5;
int BLOCK_FOOTER_OFFSET = 6;
@@ -59,7 +59,7 @@ public interface BlockletIndexRowIndexes {
int SUMMARY_INDEX_FILE_NAME = 3;
- int SUMMARY_SEGMENTID = 4;
+ int SUMMARY_SEGMENT_ID = 4;
int TASK_MIN_MAX_FLAG = 5;
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
index bca448d..8fbc7f6 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
@@ -66,12 +66,12 @@ public class SegmentIndexFileStore {
private static final Logger LOGGER =
LogServiceFactory.getLogService(SegmentIndexFileStore.class.getName());
/**
- * Stores the indexfile name and related binary file data in it.
+ * Stores the index file name and related binary file data in it.
*/
private Map<String, byte[]> carbonIndexMap;
/**
- * Stores the indexfile name and related binary file data in it.
+ * Stores the index file name and related binary file data in it.
*/
private Map<String, byte[]> carbonIndexMapWithFullPath;
@@ -168,7 +168,7 @@ public class SegmentIndexFileStore {
* @param segmentPath
* @throws IOException
*/
- public void readAllIndexAndFillBolckletInfo(String segmentPath) throws IOException {
+ public void readAllIndexAndFillBlockletInfo(String segmentPath) throws IOException {
CarbonFile[] carbonIndexFiles =
getCarbonIndexFiles(segmentPath, FileFactory.getConfiguration());
for (int i = 0; i < carbonIndexFiles.length; i++) {
@@ -290,7 +290,7 @@ public class SegmentIndexFileStore {
}
/**
- * Read carbonindex file and convert to stream and add to map
+ * Read carbon index file and convert to stream and add to map
*
* @param indexFile
* @throws IOException
@@ -329,7 +329,7 @@ public class SegmentIndexFileStore {
}
/**
- * Get the carbonindex file content
+ * Get the carbon index file content
*
* @param fileName
* @return
@@ -468,7 +468,7 @@ public class SegmentIndexFileStore {
blockIndexReplica
.setBlock_index(CarbonMetadataUtil.getBlockletIndex(blockletInfo.getBlockletIndex()));
blockIndexReplica
- .setBlocklet_info(CarbonMetadataUtil.getBlocletInfo3(blockletInfo));
+ .setBlocklet_info(CarbonMetadataUtil.getBlockletInfo3(blockletInfo));
blockIndexThrift.add(blockIndexReplica);
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/row/IndexRowImpl.java b/core/src/main/java/org/apache/carbondata/core/indexstore/row/IndexRowImpl.java
index 0cc8f18..0450d03 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/row/IndexRowImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/row/IndexRowImpl.java
@@ -41,7 +41,7 @@ public class IndexRowImpl extends IndexRow {
@Override
public int getLengthInBytes(int ordinal) {
- // if the segment data is written in tablepath
+ // if the segment data is written in table path
// then the data[BlockletIndexRowIndexes.SUMMARY_INDEX_PATH] will be null.
if (data[ordinal] == null) {
return 0;
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java b/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java
index 620d0fb..8527101 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/schema/SchemaGenerator.java
@@ -73,7 +73,7 @@ public class SchemaGenerator {
private static void updateBytePosition(CarbonRowSchema[] schema) {
int currentSize;
int bytePosition = 0;
- // First assign byte postion to all the fixed length schema
+ // First assign byte position to all the fixed length schema
for (int i = 0; i < schema.length; i++) {
switch (schema[i].getSchemaType()) {
case STRUCT:
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
index 6ef987f..2325f01 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
@@ -129,7 +129,7 @@ public class DateDirectDictionaryGenerator implements DirectDictionaryGenerator
}
dateToStr = null;
}
- //adding +2 to reserve the first cuttOffDiff value for null or empty date
+ //adding +2 to reserve the first cutOffDiff value for null or empty date
if (null == dateToStr) {
return CarbonCommonConstants.DIRECT_DICT_VALUE_NULL;
} else {
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
index 3eb6f65..e1927e4 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
@@ -169,7 +169,7 @@ public class TimeStampDirectDictionaryGenerator implements DirectDictionaryGener
}
dateToStr = null;
}
- //adding +2 to reserve the first cuttOffDiff value for null or empty date
+ //adding +2 to reserve the first cutOffDiff value for null or empty date
if (null == dateToStr) {
return CarbonCommonConstants.DIRECT_DICT_VALUE_NULL;
} else {
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampGranularityConstants.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampGranularityConstants.java
index a624bab..2af61e9 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampGranularityConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampGranularityConstants.java
@@ -29,7 +29,7 @@ public interface TimeStampGranularityConstants {
*/
String CARBON_CUTOFF_TIMESTAMP = "carbon.cutOffTimestamp";
/**
- * The property to set the timestamp (ie milis) conversion to the SECOND, MINUTE, HOUR
+ * The property to set the timestamp (i.e. milliseconds) conversion to the SECOND, MINUTE, HOUR
* or DAY level
*/
String CARBON_TIME_GRANULARITY = "carbon.timegranularity";
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/mdkey/Bits.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/mdkey/Bits.java
index 9ce585a..548292f 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/mdkey/Bits.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/mdkey/Bits.java
@@ -44,9 +44,9 @@ public class Bits implements Serializable {
*/
private int[] lens;
/**
- * wsize.
+ * word size.
*/
- private int wsize;
+ private int wSize;
/**
* byteSize.
*/
@@ -56,11 +56,11 @@ public class Bits implements Serializable {
this.lens = lens;
this.length = getTotalLength(lens);
- wsize = length / LONG_LENGTH;
+ wSize = length / LONG_LENGTH;
byteSize = length / 8;
if (length % LONG_LENGTH != 0) {
- wsize++;
+ wSize++;
}
if (length % 8 != 0) {
@@ -116,7 +116,7 @@ public class Bits implements Serializable {
}
protected long[] get(long[] keys) {
- long[] words = new long[wsize];
+ long[] words = new long[wSize];
int ll = 0;
int minLength = Math.min(lens.length, keys.length);
for (int i = minLength - 1; i >= 0; i--) {
@@ -153,7 +153,7 @@ public class Bits implements Serializable {
}
protected long[] get(int[] keys) {
- long[] words = new long[wsize];
+ long[] words = new long[wSize];
int ll = 0;
int minLength = Math.min(lens.length, keys.length);
for (int i = minLength - 1; i >= 0; i--) {
@@ -191,7 +191,7 @@ public class Bits implements Serializable {
}
private long[] getArray(long[] words) {
- long[] vals = new long[lens.length];
+ long[] values = new long[lens.length];
int ll = 0;
for (int i = lens.length - 1; i >= 0; i--) {
@@ -200,8 +200,8 @@ public class Bits implements Serializable {
long val = words[index];
long mask = (LONG_MAX >>> (MAX_LENGTH - lens[i]));
mask = mask << pos;
- vals[i] = (val & mask);
- vals[i] >>>= pos;
+ values[i] = (val & mask);
+ values[i] >>>= pos;
ll += lens[i];
int nextIndex = ll >> 6;
@@ -211,11 +211,11 @@ public class Bits implements Serializable {
if (pos != 0) {
mask = (LONG_MAX >>> (MAX_LENGTH - pos));
val = words[nextIndex];
- vals[i] = vals[i] | ((val & mask) << (lens[i] - pos));
+ values[i] = values[i] | ((val & mask) << (lens[i] - pos));
}
}
}
- return vals;
+ return values;
}
public byte[] getBytes(long[] keys) {
@@ -255,7 +255,7 @@ public class Bits implements Serializable {
int length = 8;
int ls = byteSize;
- long[] words = new long[wsize];
+ long[] words = new long[wSize];
for (int i = 0; i < words.length; i++) {
long l = 0;
ls -= 8;
@@ -281,7 +281,7 @@ public class Bits implements Serializable {
int length = 8;
int ls = byteSize;
- long[] words = new long[wsize];
+ long[] words = new long[wSize];
for (int i = 0; i < words.length; i++) {
long l = 0;
ls -= 8;
diff --git a/core/src/main/java/org/apache/carbondata/core/localdictionary/dictionaryholder/MapBasedDictionaryStore.java b/core/src/main/java/org/apache/carbondata/core/localdictionary/dictionaryholder/MapBasedDictionaryStore.java
index 89a26fe..4581962 100644
--- a/core/src/main/java/org/apache/carbondata/core/localdictionary/dictionaryholder/MapBasedDictionaryStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/localdictionary/dictionaryholder/MapBasedDictionaryStore.java
@@ -43,7 +43,7 @@ public class MapBasedDictionaryStore implements DictionaryStore {
/**
* maintaining array for reverse lookup
- * otherwise iterating everytime in map for reverse lookup will be slowdown the performance
+ * otherwise iterating every time in map for reverse lookup will slow down the performance
* It will only maintain the reference
*/
private DictionaryByteArrayWrapper[] referenceDictionaryArray;
@@ -64,7 +64,7 @@ public class MapBasedDictionaryStore implements DictionaryStore {
private boolean isThresholdReached;
/**
- * current datasize
+ * current data size
*/
private long currentSize;
@@ -92,13 +92,13 @@ public class MapBasedDictionaryStore implements DictionaryStore {
Integer value = dictionary.get(key);
// if value is null then dictionary is not present in store
if (null == value) {
- // aquire the lock
+ // acquire the lock
synchronized (dictionary) {
// check threshold
checkIfThresholdReached();
// get the value again as other thread might have added
value = dictionary.get(key);
- // double chekcing
+ // double checking
if (null == value) {
// increment the value
value = ++lastAssignValue;
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/AlluxioFileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/AlluxioFileLock.java
index e871f53..a20c302 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/AlluxioFileLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/AlluxioFileLock.java
@@ -26,7 +26,7 @@ import org.apache.log4j.Logger;
/**
* This class is used to handle the Alluxio File locking.
- * This is acheived using the concept of acquiring the data out stream using Append option.
+ * This is achieved by using the concept of acquiring the data out stream using Append option.
*/
public class AlluxioFileLock extends HdfsFileLock {
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
index 2330494..6f4bb1d 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
@@ -69,7 +69,7 @@ public class CarbonLockFactory {
absoluteLockPath = absoluteTableIdentifier.getTablePath();
} else {
absoluteLockPath =
- getLockpath(absoluteTableIdentifier.getCarbonTableIdentifier().getTableId());
+ getLockPath(absoluteTableIdentifier.getCarbonTableIdentifier().getTableId());
}
FileFactory.FileType fileType = FileFactory.getFileType(absoluteLockPath);
if (lockTypeConfigured.equals(CarbonCommonConstants.CARBON_LOCK_TYPE_CUSTOM)) {
@@ -103,7 +103,7 @@ public class CarbonLockFactory {
if (lockPath.isEmpty()) {
lockFileLocation = locFileLocation;
} else {
- lockFileLocation = getLockpath("1");
+ lockFileLocation = getLockPath("1");
}
switch (lockTypeConfigured) {
case CarbonCommonConstants.CARBON_LOCK_TYPE_CUSTOM:
@@ -139,7 +139,7 @@ public class CarbonLockFactory {
CarbonLockFactory.lockConstructor = getCustomLockConstructor(lockClassName);
}
- public static String getLockpath(String tableId) {
+ public static String getLockPath(String tableId) {
return lockPath + CarbonCommonConstants.FILE_SEPARATOR + tableId;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
index f940ddb..6b6e607 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
@@ -43,29 +43,29 @@ public class CarbonLockUtil {
*
* @param carbonLock
*/
- public static void fileUnlock(ICarbonLock carbonLock, String locktype) {
+ public static void fileUnlock(ICarbonLock carbonLock, String lockType) {
if (carbonLock.unlock()) {
- if (locktype.equals(LockUsage.METADATA_LOCK)) {
+ if (lockType.equals(LockUsage.METADATA_LOCK)) {
LOGGER.info("Metadata lock has been successfully released");
- } else if (locktype.equals(LockUsage.TABLE_STATUS_LOCK)) {
+ } else if (lockType.equals(LockUsage.TABLE_STATUS_LOCK)) {
LOGGER.info("Table status lock has been successfully released");
- } else if (locktype.equals(LockUsage.CLEAN_FILES_LOCK)) {
+ } else if (lockType.equals(LockUsage.CLEAN_FILES_LOCK)) {
LOGGER.info("Clean files lock has been successfully released");
- } else if (locktype.equals(LockUsage.DELETE_SEGMENT_LOCK)) {
+ } else if (lockType.equals(LockUsage.DELETE_SEGMENT_LOCK)) {
LOGGER.info("Delete segments lock has been successfully released");
- } else if (locktype.equals(LockUsage.INDEX_STATUS_LOCK)) {
+ } else if (lockType.equals(LockUsage.INDEX_STATUS_LOCK)) {
LOGGER.info("Index status lock has been successfully released");
}
} else {
- if (locktype.equals(LockUsage.METADATA_LOCK)) {
+ if (lockType.equals(LockUsage.METADATA_LOCK)) {
LOGGER.error("Not able to release the metadata lock");
- } else if (locktype.equals(LockUsage.TABLE_STATUS_LOCK)) {
+ } else if (lockType.equals(LockUsage.TABLE_STATUS_LOCK)) {
LOGGER.error("Not able to release the table status lock");
- } else if (locktype.equals(LockUsage.CLEAN_FILES_LOCK)) {
+ } else if (lockType.equals(LockUsage.CLEAN_FILES_LOCK)) {
LOGGER.info("Not able to release the clean files lock");
- } else if (locktype.equals(LockUsage.DELETE_SEGMENT_LOCK)) {
+ } else if (lockType.equals(LockUsage.DELETE_SEGMENT_LOCK)) {
LOGGER.info("Not able to release the delete segments lock");
- } else if (locktype.equals(LockUsage.INDEX_STATUS_LOCK)) {
+ } else if (lockType.equals(LockUsage.INDEX_STATUS_LOCK)) {
LOGGER.info("Not able to release the index status lock");
}
}
@@ -130,7 +130,7 @@ public class CarbonLockUtil {
lockFilesDir = CarbonTablePath.getLockFilesDirPath(absoluteTableIdentifier.getTablePath());
} else {
lockFilesDir = CarbonTablePath.getLockFilesDirPath(
- CarbonLockFactory.getLockpath(carbonTable.getTableInfo().getFactTable().getTableId()));
+ CarbonLockFactory.getLockPath(carbonTable.getTableInfo().getFactTable().getTableId()));
}
CarbonFile[] files = FileFactory.getCarbonFile(lockFilesDir)
.listFiles(new CarbonFileFilter() {
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java
index ca1343e..95a3ffc 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java
@@ -28,7 +28,7 @@ import org.apache.log4j.Logger;
/**
* This class is used to handle the HDFS File locking.
- * This is achieved using the concept of acquiring the data out stream using Append option.
+ * This is achieved by using the concept of acquiring the data out stream using Append option.
*/
public class HdfsFileLock extends AbstractCarbonLock {
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/ICarbonLock.java b/core/src/main/java/org/apache/carbondata/core/locks/ICarbonLock.java
index ab20a5e..3b7ee5a 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/ICarbonLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/ICarbonLock.java
@@ -30,7 +30,7 @@ public interface ICarbonLock {
boolean unlock();
/**
- * This will acquire the lock and if it doesnt get then it will retry after the confiured time.
+ * This will acquire the lock and if it doesn't get then it will retry after the configured time.
*
* @return
*/
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
index 5f2b4c4..0720c97 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
@@ -33,7 +33,7 @@ import org.apache.log4j.Logger;
/**
* This class handles the file locking in the local file system.
- * This will be handled using the file channel lock API.
+ * This will be handled by using the file channel lock API.
*/
public class LocalFileLock extends AbstractCarbonLock {
/**
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/LockUsage.java b/core/src/main/java/org/apache/carbondata/core/locks/LockUsage.java
index e032b53..8098bc6 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/LockUsage.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/LockUsage.java
@@ -18,7 +18,7 @@
package org.apache.carbondata.core.locks;
/**
- * This enum is used to define the usecase of the lock.
+ * This enum is used to define the use case of the lock.
* Each enum value is one specific lock case.
*/
public class LockUsage {
@@ -26,7 +26,7 @@ public class LockUsage {
public static final String METADATA_LOCK = "meta.lock";
public static final String COMPACTION_LOCK = "compaction.lock";
public static final String HANDOFF_LOCK = "handoff.lock";
- public static final String SYSTEMLEVEL_COMPACTION_LOCK = "system_level_compaction.lock";
+ public static final String SYSTEM_LEVEL_COMPACTION_LOCK = "system_level_compaction.lock";
public static final String ALTER_PARTITION_LOCK = "alter_partition.lock";
public static final String TABLE_STATUS_LOCK = "tablestatus.lock";
public static final String TABLE_UPDATE_STATUS_LOCK = "tableupdatestatus.lock";
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java
index 1b8688c..48e19c4 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/S3FileLock.java
@@ -29,7 +29,7 @@ import org.apache.log4j.Logger;
/**
* This class is used to handle the S3 File locking.
- * This is acheived using the concept of acquiring the data out stream using Append option.
+ * This is achieved by using the concept of acquiring the data out stream using Append option.
*/
public class S3FileLock extends AbstractCarbonLock {
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java b/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
index 0dc8bbf..844f0f8 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
@@ -99,7 +99,7 @@ public class ZooKeeperLocking extends AbstractCarbonLock {
createBaseNode();
// if exists returns null then path doesnt exist. so creating.
if (null == zk.exists(this.tableIdFolder, true)) {
- createRecursivly(this.tableIdFolder);
+ createRecursively(this.tableIdFolder);
}
// if exists returns null then path doesnt exist. so creating.
if (null == zk.exists(this.lockTypeFolder, true)) {
@@ -122,15 +122,15 @@ public class ZooKeeperLocking extends AbstractCarbonLock {
}
/**
- * Create zookeepr node if not exist
+ * Create zookeeper node if not exist
* @param path
* @throws KeeperException
* @throws InterruptedException
*/
- private void createRecursivly(String path) throws KeeperException, InterruptedException {
+ private void createRecursively(String path) throws KeeperException, InterruptedException {
if (zk.exists(path, true) == null && path.length() > 0) {
String temp = path.substring(0, path.lastIndexOf(CarbonCommonConstants.FILE_SEPARATOR));
- createRecursivly(temp);
+ createRecursively(temp);
zk.create(path, null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
}
}
@@ -149,7 +149,7 @@ public class ZooKeeperLocking extends AbstractCarbonLock {
// get the children present in zooKeeperLocation.
List<String> nodes = zk.getChildren(this.lockTypeFolder, null);
- // sort the childrens
+ // sort the children
Collections.sort(nodes);
// here the logic is , for each lock request zookeeper will create a file ending with
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/CarbonUnsafe.java b/core/src/main/java/org/apache/carbondata/core/memory/CarbonUnsafe.java
index ecadd1e..1743595 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/CarbonUnsafe.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/CarbonUnsafe.java
@@ -36,7 +36,7 @@ public final class CarbonUnsafe {
public static final int FLOAT_ARRAY_OFFSET;
- public static final boolean ISLITTLEENDIAN =
+ public static final boolean IS_LITTLE_ENDIAN =
ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN);
private static Unsafe unsafe;
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/MemoryBlock.java b/core/src/main/java/org/apache/carbondata/core/memory/MemoryBlock.java
index 563521a..3c7f1b0 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/MemoryBlock.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/MemoryBlock.java
@@ -33,7 +33,7 @@ public class MemoryBlock extends MemoryLocation {
private boolean isFreed;
/**
- * Whether it is offheap or onheap memory type
+ * Whether it is off-heap or on-heap memory type
*/
private MemoryType memoryType;
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
index 466c023..22f2214 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
@@ -44,7 +44,7 @@ public class UnsafeMemoryManager {
private static boolean offHeap = Boolean.parseBoolean(CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT,
CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT));
- private static Map<String, Set<MemoryBlock>> taskIdToOffheapMemoryBlockMap;
+ private static Map<String, Set<MemoryBlock>> taskIdToOffHeapMemoryBlockMap;
static {
long size = 0L;
String configuredWorkingMemorySize = null;
@@ -71,7 +71,7 @@ public class UnsafeMemoryManager {
}
}
} catch (Exception e) {
- LOGGER.info("Invalid offheap working memory size value: " + configuredWorkingMemorySize);
+ LOGGER.info("Invalid off-heap working memory size value: " + configuredWorkingMemorySize);
}
long takenSize = size;
MemoryType memoryType;
@@ -81,7 +81,7 @@ public class UnsafeMemoryManager {
if (takenSize < defaultSize) {
takenSize = defaultSize;
LOGGER.warn(String.format(
- "It is not recommended to set offheap working memory size less than %sMB,"
+ "It is not recommended to set off-heap working memory size less than %sMB,"
+ " so setting default value to %d",
CarbonCommonConstants.UNSAFE_WORKING_MEMORY_IN_MB_DEFAULT, defaultSize));
}
@@ -92,7 +92,7 @@ public class UnsafeMemoryManager {
memoryType = MemoryType.ONHEAP;
}
INSTANCE = new UnsafeMemoryManager(takenSize, memoryType);
- taskIdToOffheapMemoryBlockMap = new HashMap<>();
+ taskIdToOffHeapMemoryBlockMap = new HashMap<>();
}
public static final UnsafeMemoryManager INSTANCE;
@@ -106,7 +106,7 @@ public class UnsafeMemoryManager {
private UnsafeMemoryManager(long totalMemory, MemoryType memoryType) {
this.totalMemory = totalMemory;
this.memoryType = memoryType;
- LOGGER.info("Offheap Working Memory manager is created with size " + totalMemory + " with "
+ LOGGER.info("Off-heap Working Memory manager is created with size " + totalMemory + " with "
+ memoryType);
}
@@ -116,14 +116,14 @@ public class UnsafeMemoryManager {
if (memoryUsed + memoryRequested <= totalMemory && memoryType == MemoryType.OFFHEAP) {
memoryBlock = MemoryAllocator.UNSAFE.allocate(memoryRequested);
memoryUsed += memoryBlock.size();
- Set<MemoryBlock> listOfMemoryBlock = taskIdToOffheapMemoryBlockMap.get(taskId);
+ Set<MemoryBlock> listOfMemoryBlock = taskIdToOffHeapMemoryBlockMap.get(taskId);
if (null == listOfMemoryBlock) {
listOfMemoryBlock = new HashSet<>();
- taskIdToOffheapMemoryBlockMap.put(taskId, listOfMemoryBlock);
+ taskIdToOffHeapMemoryBlockMap.put(taskId, listOfMemoryBlock);
}
listOfMemoryBlock.add(memoryBlock);
if (LOGGER.isDebugEnabled()) {
- LOGGER.debug(String.format("Creating Offheap working Memory block (%s) with size %d."
+ LOGGER.debug(String.format("Creating off-heap working Memory block (%s) with size %d."
+ " Total memory used %d Bytes, left %d Bytes.",
memoryBlock.toString(), memoryBlock.size(), memoryUsed, totalMemory - memoryUsed));
}
@@ -132,22 +132,22 @@ public class UnsafeMemoryManager {
memoryBlock = MemoryAllocator.HEAP.allocate(memoryRequested);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String
- .format("Creating onheap working Memory block with size: (%d)", memoryBlock.size()));
+ .format("Creating on-heap working Memory block with size: (%d)", memoryBlock.size()));
}
}
return memoryBlock;
}
public synchronized void freeMemory(String taskId, MemoryBlock memoryBlock) {
- if (taskIdToOffheapMemoryBlockMap.containsKey(taskId)) {
- taskIdToOffheapMemoryBlockMap.get(taskId).remove(memoryBlock);
+ if (taskIdToOffHeapMemoryBlockMap.containsKey(taskId)) {
+ taskIdToOffHeapMemoryBlockMap.get(taskId).remove(memoryBlock);
}
if (!memoryBlock.isFreedStatus()) {
getMemoryAllocator(memoryBlock.getMemoryType()).free(memoryBlock);
memoryUsed -= memoryBlock.size();
memoryUsed = memoryUsed < 0 ? 0 : memoryUsed;
if (LOGGER.isDebugEnabled() && memoryBlock.getMemoryType() == MemoryType.OFFHEAP) {
- LOGGER.debug(String.format("Freeing offheap working memory block (%s) with size: %d, "
+ LOGGER.debug(String.format("Freeing off-heap working memory block (%s) with size: %d, "
+ "current available memory is: %d", memoryBlock.toString(), memoryBlock.size(),
totalMemory - memoryUsed));
}
@@ -156,29 +156,29 @@ public class UnsafeMemoryManager {
public synchronized void freeMemoryAll(String taskId) {
Set<MemoryBlock> memoryBlockSet;
- memoryBlockSet = taskIdToOffheapMemoryBlockMap.remove(taskId);
- long occuppiedMemory = 0;
+ memoryBlockSet = taskIdToOffHeapMemoryBlockMap.remove(taskId);
+ long occupiedMemory = 0;
if (null != memoryBlockSet) {
Iterator<MemoryBlock> iterator = memoryBlockSet.iterator();
MemoryBlock memoryBlock = null;
while (iterator.hasNext()) {
memoryBlock = iterator.next();
if (!memoryBlock.isFreedStatus()) {
- occuppiedMemory += memoryBlock.size();
+ occupiedMemory += memoryBlock.size();
getMemoryAllocator(memoryBlock.getMemoryType()).free(memoryBlock);
}
}
}
- memoryUsed -= occuppiedMemory;
+ memoryUsed -= occupiedMemory;
memoryUsed = memoryUsed < 0 ? 0 : memoryUsed;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format(
- "Freeing offheap working memory of size %d. Current available memory is %d",
- occuppiedMemory, totalMemory - memoryUsed));
+ "Freeing off-heap working memory of size %d. Current available memory is %d",
+ occupiedMemory, totalMemory - memoryUsed));
}
LOGGER.info(String.format(
- "Total offheap working memory used after task %s is %d. Current running tasks are %s",
- taskId, memoryUsed, StringUtils.join(taskIdToOffheapMemoryBlockMap.keySet(), ", ")));
+ "Total off-heap working memory used after task %s is %d. Current running tasks are %s",
+ taskId, memoryUsed, StringUtils.join(taskIdToOffHeapMemoryBlockMap.keySet(), ", ")));
}
public long getUsableMemory() {
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java
index 07e1a55..68b2354 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeSortMemoryManager.java
@@ -43,14 +43,14 @@ public class UnsafeSortMemoryManager {
LogServiceFactory.getLogService(UnsafeSortMemoryManager.class.getName());
/**
- * offheap is enabled
+ * off-heap is enabled
*/
private static boolean offHeap = Boolean.parseBoolean(CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT,
CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT));
/**
- * map to keep taskid to memory blocks
+ * map to keep task id to memory blocks
*/
private static Map<String, Set<MemoryBlock>> taskIdToMemoryBlockMap;
@@ -145,24 +145,24 @@ public class UnsafeSortMemoryManager {
public synchronized void freeMemoryAll(String taskId) {
Set<MemoryBlock> memoryBlockSet = null;
memoryBlockSet = taskIdToMemoryBlockMap.remove(taskId);
- long occuppiedMemory = 0;
+ long occupiedMemory = 0;
if (null != memoryBlockSet) {
Iterator<MemoryBlock> iterator = memoryBlockSet.iterator();
MemoryBlock memoryBlock = null;
while (iterator.hasNext()) {
memoryBlock = iterator.next();
if (!memoryBlock.isFreedStatus()) {
- occuppiedMemory += memoryBlock.size();
+ occupiedMemory += memoryBlock.size();
allocator.free(memoryBlock);
}
}
}
- memoryUsed -= occuppiedMemory;
+ memoryUsed -= occupiedMemory;
memoryUsed = memoryUsed < 0 ? 0 : memoryUsed;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
String.format("Freeing sort memory of size: %d, current available memory is: %d",
- occuppiedMemory, totalMemory - memoryUsed));
+ occupiedMemory, totalMemory - memoryUsed));
}
LOGGER.info(String.format(
"Total sort memory used after task %s is %d. Current running tasks are: %s",
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/CarbonMetadata.java b/core/src/main/java/org/apache/carbondata/core/metadata/CarbonMetadata.java
index f25496f..7c7382b 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/CarbonMetadata.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/CarbonMetadata.java
@@ -34,7 +34,7 @@ public final class CarbonMetadata {
/**
* meta data instance
*/
- private static final CarbonMetadata CARBONMETADATAINSTANCE = new CarbonMetadata();
+ private static final CarbonMetadata INSTANCE = new CarbonMetadata();
/**
* holds the list of tableInfo currently present
@@ -47,16 +47,16 @@ public final class CarbonMetadata {
}
public static CarbonMetadata getInstance() {
- return CARBONMETADATAINSTANCE;
+ return INSTANCE;
}
/**
* removed the table information
*
- * @param tableUniquName
+ * @param tableUniqueName
*/
- public void removeTable(String tableUniquName) {
- tableInfoMap.remove(convertToLowerCase(tableUniquName));
+ public void removeTable(String tableUniqueName) {
+ tableInfoMap.remove(convertToLowerCase(tableUniqueName));
}
/**
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/CarbonTableIdentifier.java b/core/src/main/java/org/apache/carbondata/core/metadata/CarbonTableIdentifier.java
index ca8b31e..a702899 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/CarbonTableIdentifier.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/CarbonTableIdentifier.java
@@ -87,7 +87,7 @@ public class CarbonTableIdentifier implements Serializable {
}
/**
- *Creates the key for bad record lgger.
+ *Creates the key for bad record logger.
*/
public String getBadRecordLoggerKey() {
return databaseName + File.separator + tableName + '_' + tableId;
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
index 545d5b6..917d68a 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
@@ -98,12 +98,12 @@ public class SegmentFileStore {
}
/**
- * Write segment information to the segment folder with indexfilename and
+ * Write segment information to the segment folder with index file name and
* corresponding partitions.
*/
public static void writeSegmentFile(String tablePath, final String taskNo, String location,
- String timeStamp, List<String> partionNames) throws IOException {
- writeSegmentFile(tablePath, taskNo, location, timeStamp, partionNames, false);
+ String timeStamp, List<String> partitionNames) throws IOException {
+ writeSegmentFile(tablePath, taskNo, location, timeStamp, partitionNames, false);
}
/**
@@ -157,11 +157,11 @@ public class SegmentFileStore {
}
/**
- * Write segment information to the segment folder with indexfilename and
+ * Write segment information to the segment folder with index file name and
* corresponding partitions.
*/
public static void writeSegmentFile(String tablePath, final String taskNo, String location,
- String timeStamp, List<String> partionNames, boolean isMergeIndexFlow) throws IOException {
+ String timeStamp, List<String> partitionNames, boolean isMergeIndexFlow) throws IOException {
String tempFolderLoc = timeStamp + ".tmp";
String writePath = CarbonTablePath.getSegmentFilesLocation(tablePath) + "/" + tempFolderLoc;
CarbonFile carbonFile = FileFactory.getCarbonFile(writePath);
@@ -176,8 +176,8 @@ public class SegmentFileStore {
.getCarbonFile(location + CarbonCommonConstants.FILE_SEPARATOR + tempFolderLoc);
}
- if ((tempFolder.exists() && partionNames.size() > 0) || (isMergeIndexFlow
- && partionNames.size() > 0)) {
+ if ((tempFolder.exists() && partitionNames.size() > 0) || (isMergeIndexFlow
+ && partitionNames.size() > 0)) {
CarbonFile[] carbonFiles = tempFolder.listFiles(new CarbonFileFilter() {
@Override
public boolean accept(CarbonFile file) {
@@ -195,7 +195,7 @@ public class SegmentFileStore {
SegmentFile segmentFile = new SegmentFile();
FolderDetails folderDetails = new FolderDetails();
folderDetails.setRelative(isRelative);
- folderDetails.setPartitions(partionNames);
+ folderDetails.setPartitions(partitionNames);
folderDetails.setStatus(SegmentStatus.SUCCESS.getMessage());
for (CarbonFile file : carbonFiles) {
if (file.getName().endsWith(CarbonTablePath.MERGE_INDEX_FILE_EXT)) {
@@ -208,7 +208,7 @@ public class SegmentFileStore {
String path = null;
if (isMergeIndexFlow) {
// in case of merge index flow, tasks are launched per partition and all the tasks
- // will be writting to the same tmp folder, in that case taskNo is not unique.
+ // will be written to the same tmp folder, in that case taskNo is not unique.
// To generate a unique fileName UUID is used
path = writePath + "/" + CarbonUtil.generateUUID() + CarbonTablePath.SEGMENT_EXT;
} else {
@@ -588,7 +588,7 @@ public class SegmentFileStore {
CarbonCommonConstants.MAX_TIMEOUT_FOR_CONCURRENT_LOCK_DEFAULT);
try {
if (carbonLock.lockWithRetries(retryCount, maxTimeout)) {
- LOGGER.info("Acquired lock for tablepath" + tablePath + " for table status updation");
+ LOGGER.info("Acquired lock for table path" + tablePath + " for table status update");
LoadMetadataDetails[] listOfLoadFolderDetailsArray =
SegmentStatusManager.readLoadMetadata(metadataPath);
@@ -622,15 +622,15 @@ public class SegmentFileStore {
status = true;
} else {
LOGGER.error(
- "Not able to acquire the lock for Table status updation for table path " + tablePath);
+ "Not able to acquire the lock for Table status update for table path " + tablePath);
}
} finally {
if (carbonLock.unlock()) {
- LOGGER.info("Table unlocked successfully after table status updation" + tablePath);
+ LOGGER.info("Table unlocked successfully after table status update" + tablePath);
} else {
LOGGER.error(
- "Unable to unlock Table lock for table" + tablePath + " during table status updation");
+ "Unable to unlock Table lock for table" + tablePath + " during table status update");
}
}
return status;
@@ -803,7 +803,7 @@ public class SegmentFileStore {
for (Map.Entry<String, byte[]> entry : carbonIndexMap.entrySet()) {
List<DataFileFooter> indexInfo =
fileFooterConverter.getIndexInfo(entry.getKey(), entry.getValue());
- // carbonindex file stores blocklets so block filename will be duplicated, use set to remove
+ // carbon index file stores blocklets so block filename will be duplicated, use set to remove
// duplicates
Set<String> blocks = new LinkedHashSet<>();
for (DataFileFooter footer : indexInfo) {
@@ -886,7 +886,7 @@ public class SegmentFileStore {
String location = entry.getKey();
if (entry.getValue().isRelative) {
if (location.equals("/")) {
- // incase of flat folder, the relative segment location is '/',
+ // in case of flat folder, the relative segment location is '/',
// so don't append it as we again add file separator for file names.
location = tablePath;
} else {
@@ -980,7 +980,7 @@ public class SegmentFileStore {
+ CarbonTablePath.SEGMENT_EXT;
writeSegmentFile(segmentFile, writePath);
}
- // Check whether we can completly remove the segment.
+ // Check whether we can completely remove the segment.
boolean deleteSegment = true;
for (Map.Entry<String, FolderDetails> entry : segmentFile.getLocationMap().entrySet()) {
if (entry.getValue().getStatus().equals(SegmentStatus.SUCCESS.getMessage())) {
@@ -1055,7 +1055,7 @@ public class SegmentFileStore {
}
for (Map.Entry<String, List<String>> entry : fileStore.indexFilesMap.entrySet()) {
String indexFile = entry.getKey();
- // Check the partition information in the partiton mapper
+ // Check the partition information in the partition mapper
Long fileTimestamp = CarbonUpdateUtil.getTimeStampAsLong(indexFile
.substring(indexFile.lastIndexOf(CarbonCommonConstants.HYPHEN) + 1,
indexFile.length() - CarbonTablePath.INDEX_FILE_EXT.length()));
@@ -1131,15 +1131,15 @@ public class SegmentFileStore {
*/
private static void deletePhysicalPartition(List<PartitionSpec> partitionSpecs,
Map<String, List<String>> locationMap, List<String> indexOrMergeFiles, String tablePath) {
- for (String indexOrMergFile : indexOrMergeFiles) {
+ for (String indexOrMergeFile : indexOrMergeFiles) {
if (null != partitionSpecs) {
- Path location = new Path(indexOrMergFile);
+ Path location = new Path(indexOrMergeFile);
boolean exists = pathExistsInPartitionSpec(partitionSpecs, location);
if (!exists) {
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(location.toString()));
}
} else {
- Path location = new Path(indexOrMergFile);
+ Path location = new Path(indexOrMergeFile);
FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(location.toString()));
}
}
@@ -1300,7 +1300,7 @@ public class SegmentFileStore {
}
/**
- * Returs the current partition specs of this segment
+ * get the current partition specs of this segment
* @return
*/
public List<PartitionSpec> getPartitionSpecs() {
@@ -1320,7 +1320,7 @@ public class SegmentFileStore {
}
/**
- * This method returns the list of indx/merge index files for a segment in carbonTable.
+ * This method returns the list of index/merge index files for a segment in carbonTable.
*/
public static Set<String> getIndexFilesListForSegment(Segment segment, String tablePath)
throws IOException {
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/BlockletInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/BlockletInfo.java
index 63c7040..ab74e4d 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/BlockletInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/blocklet/BlockletInfo.java
@@ -164,20 +164,20 @@ public class BlockletInfo implements Serializable, Writable {
public void write(DataOutput output) throws IOException {
output.writeLong(dimensionOffset);
output.writeLong(measureOffsets);
- int dsize = dimensionChunkOffsets != null ? dimensionChunkOffsets.size() : 0;
- output.writeShort(dsize);
- for (int i = 0; i < dsize; i++) {
+ int dimensionSize = dimensionChunkOffsets != null ? dimensionChunkOffsets.size() : 0;
+ output.writeShort(dimensionSize);
+ for (int i = 0; i < dimensionSize; i++) {
output.writeLong(dimensionChunkOffsets.get(i));
}
- for (int i = 0; i < dsize; i++) {
+ for (int i = 0; i < dimensionSize; i++) {
output.writeInt(dimensionChunksLength.get(i));
}
- int mSize = measureChunkOffsets != null ? measureChunkOffsets.size() : 0;
- output.writeShort(mSize);
- for (int i = 0; i < mSize; i++) {
+ int measureSize = measureChunkOffsets != null ? measureChunkOffsets.size() : 0;
+ output.writeShort(measureSize);
+ for (int i = 0; i < measureSize; i++) {
output.writeLong(measureChunkOffsets.get(i));
}
- for (int i = 0; i < mSize; i++) {
+ for (int i = 0; i < measureSize; i++) {
output.writeInt(measureChunksLength.get(i));
}
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
index 406e30d..fa2cf50 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
@@ -483,7 +483,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
}
/* (non-Javadoc)
- * convert from external to wrapper columnschema
+ * convert from external to wrapper column schema
*/
@Override
public ColumnSchema fromExternalToWrapperColumnSchema(
@@ -558,7 +558,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
}
/* (non-Javadoc)
- * convert from external to wrapper tableschema
+ * convert from external to wrapper table schema
*/
@Override
public TableSchema fromExternalToWrapperTableSchema(
@@ -603,7 +603,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
}
/* (non-Javadoc)
- * convert from external to wrapper tableinfo
+ * convert from external to wrapper table info
*/
@Override
public TableInfo fromExternalToWrapperTableInfo(
@@ -625,10 +625,10 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
}
private List<ParentColumnTableRelation> fromExternalToWrapperParentTableColumnRelations(
- List<org.apache.carbondata.format.ParentColumnTableRelation> thirftParentColumnRelation) {
+ List<org.apache.carbondata.format.ParentColumnTableRelation> thriftParentColumnRelation) {
List<ParentColumnTableRelation> parentColumnTableRelationList = new ArrayList<>();
for (org.apache.carbondata.format.ParentColumnTableRelation carbonTableRelation :
- thirftParentColumnRelation) {
+ thriftParentColumnRelation) {
RelationIdentifier relationIdentifier =
new RelationIdentifier(carbonTableRelation.getRelationIdentifier().getDatabaseName(),
carbonTableRelation.getRelationIdentifier().getTableName(),
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/SchemaEvolutionEntry.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/SchemaEvolutionEntry.java
index 33f6902..8fcfe49 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/SchemaEvolutionEntry.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/SchemaEvolutionEntry.java
@@ -23,12 +23,12 @@ import java.util.List;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
/**
- * Store the infomation about the schema evolution
+ * Store the information about the schema evolution
*/
public class SchemaEvolutionEntry implements Serializable {
/**
- * serilization version
+ * serialization version
*/
private static final long serialVersionUID = -7619477063676325276L;
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/indextable/IndexTableInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/indextable/IndexTableInfo.java
index 041066b..5da81af 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/indextable/IndexTableInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/indextable/IndexTableInfo.java
@@ -162,9 +162,9 @@ public class IndexTableInfo implements Serializable {
IndexTableInfo[] indexTableInfos = fromGson(oldIndexIno);
for (IndexTableInfo indexTableInfo : indexTableInfos) {
if (indexTableInfo.tableName.equalsIgnoreCase(indexName)) {
- Map<String, String> oldindexProperties = indexTableInfo.indexProperties;
- oldindexProperties.put(CarbonCommonConstants.INDEX_STATUS, IndexStatus.ENABLED.name());
- indexTableInfo.setIndexProperties(oldindexProperties);
+ Map<String, String> oldIndexProperties = indexTableInfo.indexProperties;
+ oldIndexProperties.put(CarbonCommonConstants.INDEX_STATUS, IndexStatus.ENABLED.name());
+ indexTableInfo.setIndexProperties(oldIndexProperties);
}
}
return toGson(indexTableInfos);
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index c678c73..a8cc9e9 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -95,7 +95,7 @@ public class CarbonTable implements Serializable, Writable {
// An ordered list, same order as when creating this table by user
private List<CarbonColumn> createOrderColumn;
- // Implicit columns that for internal usage, like positionid and tupleid for update/delete
+ // Implicit columns that for internal usage, like positionId and tupleId for update/delete
// operation. see CARBON_IMPLICIT_COLUMN_POSITIONID, CARBON_IMPLICIT_COLUMN_TUPLEID
private List<CarbonDimension> implicitDimensions;
@@ -146,7 +146,7 @@ public class CarbonTable implements Serializable, Writable {
}
/**
- * During creation of TableInfo from hivemetastore the IndexSchemas and the columns
+ * During creation of TableInfo from hive metastore the IndexSchemas and the columns
* DataTypes are not converted to the appropriate child classes.
* This method will cast the same to the appropriate classes
*/
@@ -341,7 +341,7 @@ public class CarbonTable implements Serializable, Writable {
complexDimension.initializeChildDimensionsList(columnSchema.getNumberOfChild());
allDimensions.add(complexDimension);
dimensionOrdinal =
- readAllComplexTypeChildrens(dimensionOrdinal, columnSchema.getNumberOfChild(),
+ readAllComplexTypeChildren(dimensionOrdinal, columnSchema.getNumberOfChild(),
listOfColumns, complexDimension);
i = dimensionOrdinal - 1;
complexTypeOrdinal = assignComplexOrdinal(complexDimension, complexTypeOrdinal);
@@ -377,7 +377,7 @@ public class CarbonTable implements Serializable, Writable {
}
/**
- * This method will add implicit dimension into carbontable
+ * This method will add implicit dimension into carbon table
*/
private void addImplicitDimension(int dimensionOrdinal, List<CarbonDimension> dimensions) {
dimensions.add(new CarbonImplicitDimension(dimensionOrdinal,
@@ -397,7 +397,7 @@ public class CarbonTable implements Serializable, Writable {
* Read all primitive/complex children and set it as list of child carbon dimension to parent
* dimension
*/
- private int readAllComplexTypeChildrens(int dimensionOrdinal, int childCount,
+ private int readAllComplexTypeChildren(int dimensionOrdinal, int childCount,
List<ColumnSchema> listOfColumns, CarbonDimension parentDimension) {
for (int i = 0; i < childCount; i++) {
ColumnSchema columnSchema = listOfColumns.get(dimensionOrdinal);
@@ -409,7 +409,7 @@ public class CarbonTable implements Serializable, Writable {
complexDimension.initializeChildDimensionsList(columnSchema.getNumberOfChild());
parentDimension.getListOfChildDimensions().add(complexDimension);
dimensionOrdinal =
- readAllComplexTypeChildrens(dimensionOrdinal, columnSchema.getNumberOfChild(),
+ readAllComplexTypeChildren(dimensionOrdinal, columnSchema.getNumberOfChild(),
listOfColumns, complexDimension);
} else {
CarbonDimension carbonDimension =
@@ -426,18 +426,18 @@ public class CarbonTable implements Serializable, Writable {
* Read all primitive/complex children and set it as list of child carbon dimension to parent
* dimension
*/
- private int assignComplexOrdinal(CarbonDimension parentDimension, int complexDimensionOrdianl) {
+ private int assignComplexOrdinal(CarbonDimension parentDimension, int complexDimensionOrdinal) {
for (int i = 0; i < parentDimension.getNumberOfChild(); i++) {
CarbonDimension dimension = parentDimension.getListOfChildDimensions().get(i);
if (dimension.getNumberOfChild() > 0) {
- dimension.setComplexTypeOridnal(++complexDimensionOrdianl);
- complexDimensionOrdianl = assignComplexOrdinal(dimension, complexDimensionOrdianl);
+ dimension.setComplexTypeOrdinal(++complexDimensionOrdinal);
+ complexDimensionOrdinal = assignComplexOrdinal(dimension, complexDimensionOrdinal);
} else {
parentDimension.getListOfChildDimensions().get(i)
- .setComplexTypeOridnal(++complexDimensionOrdianl);
+ .setComplexTypeOrdinal(++complexDimensionOrdinal);
}
}
- return complexDimensionOrdianl;
+ return complexDimensionOrdinal;
}
/**
@@ -448,14 +448,14 @@ public class CarbonTable implements Serializable, Writable {
}
/**
- * @return the tabelName
+ * @return the tableName
*/
public String getTableName() {
return tableInfo.getFactTable().getTableName();
}
/**
- * @return the tabelId
+ * @return the tableId
*/
public String getTableId() {
return tableInfo.getFactTable().getTableId();
@@ -927,7 +927,7 @@ public class CarbonTable implements Serializable, Writable {
* methods returns true if operation is allowed for the corresponding Index or not
* if this operation makes Index stale it is not allowed
*
- * @param carbonTable carbontable to be operated
+ * @param carbonTable carbon table to be operated
* @param operation which operation on the table,such as drop column,change datatype.
* @param targets objects which the operation impact on,such as column
* @return true allow;false not allow
@@ -950,7 +950,7 @@ public class CarbonTable implements Serializable, Writable {
}
} catch (Exception e) {
// since method returns true or false and based on that calling function throws exception, no
- // need to throw the catched exception
+ // need to throw the catch exception
LOGGER.error(e.getMessage(), e);
return true;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
index 05a6186..a2be5e6 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
@@ -79,9 +79,9 @@ public class TableInfo implements Serializable, Writable {
* or Transactional Table. The difference between Transactional and Non Transactional table is
* Non Transactional Table will not contain any Metadata folder and subsequently
* no TableStatus or Schema files.
- * All ACID properties cannot be aplied to Non Transactional Table as there is no Commit points
+ * All ACID properties cannot be applied to Non Transactional Table as there is no Commit points
* i.e. no TableStatus File.
- * What ever files present in the path will be read but it system doesnot ensure ACID rules for
+ * What ever files present in the path will be read but it system doesn't ensure ACID rules for
* this data, mostly Consistency part.
*
*/
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
index 85c7674..28c4388 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
@@ -108,9 +108,9 @@ public class TableSchemaBuilder {
schema.setTableId(UUID.randomUUID().toString());
schema.setPartitionInfo(null);
schema.setBucketingInfo(null);
- SchemaEvolution schemaEvol = new SchemaEvolution();
- schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
- schema.setSchemaEvolution(schemaEvol);
+ SchemaEvolution schemaEvolution = new SchemaEvolution();
+ schemaEvolution.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
+ schema.setSchemaEvolution(schemaEvolution);
List<ColumnSchema> allColumns = new LinkedList<>(sortColumns);
allColumns.addAll(dimension);
allColumns.addAll(varCharColumns);
@@ -133,10 +133,9 @@ public class TableSchemaBuilder {
if (isLocalDictionaryEnabled) {
property.put(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE,
String.valueOf(isLocalDictionaryEnabled));
- String localdictionaryThreshold = localDictionaryThreshold.equalsIgnoreCase("0") ?
- CarbonCommonConstants.LOCAL_DICTIONARY_THRESHOLD_DEFAULT :
- localDictionaryThreshold;
- property.put(CarbonCommonConstants.LOCAL_DICTIONARY_THRESHOLD, localdictionaryThreshold);
+ String localDictionaryThreshold = this.localDictionaryThreshold.equalsIgnoreCase("0") ?
+ CarbonCommonConstants.LOCAL_DICTIONARY_THRESHOLD_DEFAULT : this.localDictionaryThreshold;
+ property.put(CarbonCommonConstants.LOCAL_DICTIONARY_THRESHOLD, localDictionaryThreshold);
for (int index = 0; index < allColumns.size(); index++) {
ColumnSchema colSchema = allColumns.get(index);
if (colSchema.getDataType() == DataTypes.STRING
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonColumn.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonColumn.java
index b095cb2..ae2775a 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonColumn.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonColumn.java
@@ -175,7 +175,7 @@ public class CarbonColumn implements Serializable {
}
/**
- * @return columnproperty
+ * @return column property map
*/
public Map<String, String> getColumnProperties() {
return this.columnSchema.getColumnProperties();
@@ -216,7 +216,7 @@ public class CarbonColumn implements Serializable {
this.useActualData = useActualData;
}
- public boolean isColmatchBasedOnId(CarbonColumn queryColumn) {
+ public boolean isColumnMatchBasedOnId(CarbonColumn queryColumn) {
return this.getColName().equalsIgnoreCase(this.getColumnId()) && this.getColName()
.equalsIgnoreCase(queryColumn.getColName());
}
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java
index c93216b..39a706d 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java
@@ -35,7 +35,7 @@ public class CarbonDimension extends CarbonColumn {
/**
* in case of dictionary dimension this will store the ordinal
- * of the dimension in mdkey
+ * of the dimension in MDKey
*/
private int keyOrdinal;
@@ -86,7 +86,7 @@ public class CarbonDimension extends CarbonColumn {
return keyOrdinal;
}
- public void setComplexTypeOridnal(int complexTypeOrdinal) {
+ public void setComplexTypeOrdinal(int complexTypeOrdinal) {
}
/**
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonImplicitDimension.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonImplicitDimension.java
index 01222c6..b536726 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonImplicitDimension.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonImplicitDimension.java
@@ -27,7 +27,7 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.metadata.encoder.Encoding;
/**
- * This instance will be created for implicit column like tupleid.
+ * This instance will be created for implicit column like tupleId.
*/
public class CarbonImplicitDimension extends CarbonDimension {
/**
@@ -124,7 +124,7 @@ public class CarbonImplicitDimension extends CarbonDimension {
}
/**
- * To specify the visibily of the column by default its false
+ * To specify the visibility of the column by default its false
*/
public boolean isInvisible() {
return true;
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
index c43b76f..dd4b4a0 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
@@ -137,7 +137,7 @@ public class ColumnSchema implements Serializable, Writable, Cloneable {
private List<ParentColumnTableRelation> parentColumnTableRelations;
/**
- * timeseries function applied on column
+ * time-series function applied on column
*/
private String timeSeriesFunction = "";
@@ -419,7 +419,7 @@ public class ColumnSchema implements Serializable, Writable, Cloneable {
}
/**
- * return columnproperties
+ * return column properties
*/
public Map<String, String> getColumnProperties() {
return columnProperties;
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ParentColumnTableRelation.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ParentColumnTableRelation.java
index dd84ca2..fa73e88 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ParentColumnTableRelation.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ParentColumnTableRelation.java
@@ -40,10 +40,10 @@ public class ParentColumnTableRelation implements Serializable, Writable {
private String columnName;
- public ParentColumnTableRelation(RelationIdentifier relationIdentifier, String columId,
+ public ParentColumnTableRelation(RelationIdentifier relationIdentifier, String columnId,
String columnName) {
this.relationIdentifier = relationIdentifier;
- this.columnId = columId;
+ this.columnId = columnId;
this.columnName = columnName;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java b/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
index 77ebf3e..791c422 100644
--- a/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
@@ -95,7 +95,7 @@ public class CarbonUpdateUtil {
*/
public static String getTableBlockPath(String tid, String tablePath, boolean isStandardTable) {
String partField = getRequiredFieldFromTID(tid, TupleIdEnum.PART_ID);
- // If it has segment file then partfield can be appended directly to table path
+ // If it has segment file then part field can be appended directly to table path
if (!isStandardTable) {
return tablePath + CarbonCommonConstants.FILE_SEPARATOR + partField.replace("#", "/");
}
@@ -173,7 +173,7 @@ public class CarbonUpdateUtil {
} finally {
if (lockStatus) {
if (updateLock.unlock()) {
- LOGGER.info("Unlock the segment update lock successfull.");
+ LOGGER.info("Unlock the segment update lock successful.");
} else {
LOGGER.error("Not able to unlock the segment update lock.");
}
@@ -215,15 +215,15 @@ public class CarbonUpdateUtil {
* @param updatedSegmentsList
* @param table
* @param updatedTimeStamp
- * @param isTimestampUpdationRequired
+ * @param isTimestampUpdateRequired
* @param segmentsToBeDeleted
* @return
*/
public static boolean updateTableMetadataStatus(Set<Segment> updatedSegmentsList,
- CarbonTable table, String updatedTimeStamp, boolean isTimestampUpdationRequired,
+ CarbonTable table, String updatedTimeStamp, boolean isTimestampUpdateRequired,
List<Segment> segmentsToBeDeleted) {
return updateTableMetadataStatus(updatedSegmentsList, table, updatedTimeStamp,
- isTimestampUpdationRequired, segmentsToBeDeleted, new ArrayList<Segment>(), "");
+ isTimestampUpdateRequired, segmentsToBeDeleted, new ArrayList<Segment>(), "");
}
/**
@@ -231,12 +231,12 @@ public class CarbonUpdateUtil {
* @param updatedSegmentsList
* @param table
* @param updatedTimeStamp
- * @param isTimestampUpdationRequired
+ * @param isTimestampUpdateRequired
* @param segmentsToBeDeleted
* @return
*/
public static boolean updateTableMetadataStatus(Set<Segment> updatedSegmentsList,
- CarbonTable table, String updatedTimeStamp, boolean isTimestampUpdationRequired,
+ CarbonTable table, String updatedTimeStamp, boolean isTimestampUpdateRequired,
List<Segment> segmentsToBeDeleted, List<Segment> segmentFilesTobeUpdated, String uuid) {
boolean status = false;
@@ -253,14 +253,14 @@ public class CarbonUpdateUtil {
if (lockStatus) {
LOGGER.info(
"Acquired lock for table" + table.getDatabaseName() + "." + table.getTableName()
- + " for table status updation");
+ + " for table status update");
LoadMetadataDetails[] listOfLoadFolderDetailsArray =
SegmentStatusManager.readLoadMetadata(metaDataFilepath);
for (LoadMetadataDetails loadMetadata : listOfLoadFolderDetailsArray) {
- if (isTimestampUpdationRequired) {
+ if (isTimestampUpdateRequired) {
// we are storing the link between the 2 status files in the segment 0 only.
if (loadMetadata.getLoadName().equalsIgnoreCase("0")) {
loadMetadata.setUpdateStatusFileName(
@@ -270,14 +270,14 @@ public class CarbonUpdateUtil {
// if the segments is in the list of marked for delete then update the status.
if (segmentsToBeDeleted.contains(new Segment(loadMetadata.getLoadName()))) {
loadMetadata.setSegmentStatus(SegmentStatus.MARKED_FOR_DELETE);
- loadMetadata.setModificationOrdeletionTimesStamp(Long.parseLong(updatedTimeStamp));
+ loadMetadata.setModificationOrDeletionTimestamp(Long.parseLong(updatedTimeStamp));
}
}
for (Segment segName : updatedSegmentsList) {
if (loadMetadata.getLoadName().equalsIgnoreCase(segName.getSegmentNo())) {
// if this call is coming from the delete delta flow then the time stamp
// String will come empty then no need to write into table status file.
- if (isTimestampUpdationRequired) {
+ if (isTimestampUpdateRequired) {
// if in case of update flow.
if (loadMetadata.getUpdateDeltaStartTimestamp().isEmpty()) {
// this means for first time it is getting updated .
@@ -304,19 +304,19 @@ public class CarbonUpdateUtil {
status = true;
} else {
- LOGGER.error("Not able to acquire the lock for Table status updation for table " + table
+ LOGGER.error("Not able to acquire the lock for Table status update for table " + table
.getDatabaseName() + "." + table.getTableName());
}
} finally {
if (lockStatus) {
if (carbonLock.unlock()) {
LOGGER.info(
- "Table unlocked successfully after table status updation" + table.getDatabaseName()
+ "Table unlocked successfully after table status update" + table.getDatabaseName()
+ "." + table.getTableName());
} else {
LOGGER.error(
"Unable to unlock Table lock for table" + table.getDatabaseName() + "." + table
- .getTableName() + " during table status updation");
+ .getTableName() + " during table status update");
}
}
}
@@ -375,14 +375,14 @@ public class CarbonUpdateUtil {
/**
* returns timestamp as long value
*
- * @param timtstamp
+ * @param timestamp
* @return
*/
- public static Long getTimeStampAsLong(String timtstamp) {
+ public static Long getTimeStampAsLong(String timestamp) {
try {
- return Long.parseLong(timtstamp);
+ return Long.parseLong(timestamp);
} catch (NumberFormatException nfe) {
- String errorMsg = "Invalid timestamp : " + timtstamp;
+ String errorMsg = "Invalid timestamp : " + timestamp;
LOGGER.error(errorMsg);
return null;
}
@@ -473,7 +473,7 @@ public class CarbonUpdateUtil {
}
/**
- * Handling of the clean up of old carbondata files, index files , delte delta,
+ * Handling of the clean up of old carbondata files, index files , delete delta,
* update status files.
* @param table clean up will be handled on this table.
* @param forceDelete if true then max query execution timeout will not be considered.
@@ -521,7 +521,7 @@ public class CarbonUpdateUtil {
FileFactory.getCarbonFile(segmentPath);
CarbonFile[] allSegmentFiles = segDir.listFiles();
- // scan through the segment and find the carbondatafiles and index files.
+ // scan through the segment and find the carbon data files and index files.
boolean updateSegmentFile = false;
// deleting of the aborted file scenario.
if (deleteStaleCarbonDataFiles(segment, allSegmentFiles, updateStatusManager)) {
@@ -719,7 +719,7 @@ public class CarbonUpdateUtil {
/**
* This function deletes all the stale carbondata files during clean up before update operation
- * one scenario is if update operation is ubruptly stopped before updation of table status then
+ * one scenario is if update operation is abruptly stopped before update of table status then
* the carbondata file created during update operation is stale file and it will be deleted in
* this function in next update operation
* @param segment
diff --git a/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java b/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java
index abe8f6b..2c5fd9e 100644
--- a/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/mutate/SegmentUpdateDetails.java
@@ -180,16 +180,16 @@ public class SegmentUpdateDetails implements Serializable {
/**
* returns timestamp as long value
*
- * @param timtstamp
+ * @param timestamp
* @return
*/
- private Long getTimeStampAsLong(String timtstamp) {
+ private Long getTimeStampAsLong(String timestamp) {
long longValue = 0;
try {
- longValue = Long.parseLong(timtstamp);
+ longValue = Long.parseLong(timestamp);
} catch (NumberFormatException nfe) {
if (LOGGER.isDebugEnabled()) {
- String errorMsg = "Invalid timestamp : " + timtstamp;
+ String errorMsg = "Invalid timestamp : " + timestamp;
LOGGER.debug(errorMsg);
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/mutate/data/BlockMappingVO.java b/core/src/main/java/org/apache/carbondata/core/mutate/data/BlockMappingVO.java
index 9f1c713..70a9136 100644
--- a/core/src/main/java/org/apache/carbondata/core/mutate/data/BlockMappingVO.java
+++ b/core/src/main/java/org/apache/carbondata/core/mutate/data/BlockMappingVO.java
@@ -31,7 +31,7 @@ public class BlockMappingVO {
private Map<String, RowCountDetailsVO> completeBlockRowDetailVO;
// This map will help us to finding the segment id from the block path.
- // key is 'blockpath' and value is 'segmentId'
+ // key is 'blockPath' and value is 'segmentId'
private Map<String, String> blockToSegmentMapping;
public void setCompleteBlockRowDetailVO(Map<String, RowCountDetailsVO> completeBlockRowDetailVO) {
diff --git a/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesFunctionEnum.java b/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesFunctionEnum.java
index f30923b..7cd5835 100644
--- a/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesFunctionEnum.java
+++ b/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesFunctionEnum.java
@@ -18,7 +18,7 @@
package org.apache.carbondata.core.preagg;
/**
- * enum for timeseries function
+ * enum for time-series function
*/
public enum TimeSeriesFunctionEnum {
SECOND("second", 0),
diff --git a/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java b/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
index 36c0260..ce8c108 100644
--- a/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
+++ b/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
@@ -30,7 +30,7 @@ import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.log4j.Logger;
/**
- * class for applying timeseries udf
+ * class for applying time-series udf
*/
public class TimeSeriesUDF {
@@ -40,7 +40,7 @@ public class TimeSeriesUDF {
public final List<String> TIMESERIES_FUNCTION = new ArrayList<>();
// thread local for keeping calender instance
- private ThreadLocal<Calendar> calanderThreadLocal = new ThreadLocal<>();
+ private ThreadLocal<Calendar> calenderThreadLocal = new ThreadLocal<>();
/**
* singleton instance
@@ -73,7 +73,7 @@ public class TimeSeriesUDF {
return data;
}
initialize();
- Calendar calendar = calanderThreadLocal.get();
+ Calendar calendar = calenderThreadLocal.get();
calendar.clear();
calendar.setTimeInMillis(data.getTime());
TimeSeriesFunctionEnum timeSeriesFunctionEnum =
@@ -132,7 +132,7 @@ public class TimeSeriesUDF {
calendar.set(Calendar.MILLISECOND, 0);
break;
default:
- throw new IllegalArgumentException("Invalid timeseries function name: " + function);
+ throw new IllegalArgumentException("Invalid time-series function name: " + function);
}
data.setTime(calendar.getTimeInMillis());
return data;
@@ -151,8 +151,8 @@ public class TimeSeriesUDF {
* Below method will be used to initialize the thread local
*/
private void initialize() {
- if (calanderThreadLocal.get() == null) {
- calanderThreadLocal.set(new GregorianCalendar());
+ if (calenderThreadLocal.get() == null) {
+ calenderThreadLocal.set(new GregorianCalendar());
}
if (TIMESERIES_FUNCTION.isEmpty()) {
TIMESERIES_FUNCTION.add("second");
@@ -180,6 +180,6 @@ public class TimeSeriesUDF {
DaysOfWeekEnum.valueOf(CarbonCommonConstants.CARBON_TIMESERIES_FIRST_DAY_OF_WEEK_DEFAULT)
.getOrdinal();
}
- calanderThreadLocal.get().setFirstDayOfWeek(firstDayOfWeek);
+ calenderThreadLocal.get().setFirstDayOfWeek(firstDayOfWeek);
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/profiler/ExplainCollector.java b/core/src/main/java/org/apache/carbondata/core/profiler/ExplainCollector.java
index 4c73e37..a2bc865 100644
--- a/core/src/main/java/org/apache/carbondata/core/profiler/ExplainCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/profiler/ExplainCollector.java
@@ -159,7 +159,7 @@ public class ExplainCollector {
return output;
}
- public static String getFormatedOutput() {
+ public static String getFormattedOutput() {
if (null != get()) {
return get().toString();
} else {
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
index 4c73833..815efee 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
@@ -114,7 +114,7 @@ public class TableStatusReadCommittedScope implements ReadCommittedScope {
@Override
public void takeCarbonIndexFileSnapShot() throws IOException {
// Only Segment Information is updated.
- // File information will be fetched on the fly according to the fecthed segment info.
+ // File information will be fetched on the fly according to the fetched segment info.
this.loadMetadataDetails = SegmentStatusManager
.readTableStatusFile(CarbonTablePath.getTableStatusFilePath(identifier.getTablePath()));
}
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
index 686b0fa..e05e581 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
@@ -116,11 +116,11 @@ public class CarbonDeleteFilesDataReader {
/**
* Below method will be used to read the delete delta files
- * and get the map of blockletid and page id mapping to deleted
+ * and get the map of blockletId and page id mapping to deleted
* rows
*
* @param deltaFiles delete delta files array
- * @return map of blockletid_pageid to deleted rows
+ * @return map of blockletId_pageId to deleted rows
*/
public Map<String, DeleteDeltaVo> getDeletedRowsDataVo(String[] deltaFiles) {
List<Future<DeleteDeltaBlockDetails>> taskSubmitList = new ArrayList<>();
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java
index 56985fe..c71722b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/ResultCollectorFactory.java
@@ -55,7 +55,7 @@ public class ResultCollectorFactory {
if (blockExecutionInfo.isRawRecordDetailQuery()) {
if (blockExecutionInfo.isRestructuredBlock()) {
if (blockExecutionInfo.isRequiredRowId()) {
- LOGGER.info("RowId Restructure based raw ollector is used to scan and collect the data");
+ LOGGER.info("RowId Restructure based raw collector is used to scan and collect the data");
scannerResultAggregator = new RowIdRestructureBasedRawResultCollector(blockExecutionInfo);
} else {
LOGGER.info("Restructure based raw collector is used to scan and collect the data");
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java
index 1890f4b..947a561 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java
@@ -73,11 +73,11 @@ public class DictionaryBasedResultCollector extends AbstractScannedResultCollect
private byte[][] noDictionaryKeys;
private byte[][] complexTypeKeyArray;
- protected Map<Integer, GenericQueryType> comlexDimensionInfoMap;
+ protected Map<Integer, GenericQueryType> complexDimensionInfoMap;
/**
* Field of this Map is the parent Column and associated child columns.
- * Final Projection shuld be a merged list consist of only parents.
+ * Final Projection should be a merged list consist of only parents.
*/
private Map<Integer, List<Integer>> parentToChildColumnsMap = new HashMap<>();
@@ -101,7 +101,7 @@ public class DictionaryBasedResultCollector extends AbstractScannedResultCollect
queryMeasures = executionInfo.getProjectionMeasures();
initDimensionAndMeasureIndexesForFillingData();
isDimensionExists = queryDimensions.length > 0;
- this.comlexDimensionInfoMap = executionInfo.getComlexDimensionInfoMap();
+ this.complexDimensionInfoMap = executionInfo.getComplexDimensionInfoMap();
this.readOnlyDelta = executionInfo.isReadOnlyDelta();
}
@@ -157,7 +157,7 @@ public class DictionaryBasedResultCollector extends AbstractScannedResultCollect
fillComplexColumnDataBufferForThisRow();
for (int i = 0; i < queryDimensions.length; i++) {
fillDimensionData(scannedResult, surrogateResult, noDictionaryKeys, complexTypeKeyArray,
- comlexDimensionInfoMap, row, i, queryDimensions[i].getDimension().getOrdinal());
+ complexDimensionInfoMap, row, i, queryDimensions[i].getDimension().getOrdinal());
}
}
fillMeasureData(scannedResult, row);
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
index 9f18894..84c9cd0 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
@@ -70,7 +70,7 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
super(blockExecutionInfos);
this.isDirectVectorFill = blockExecutionInfos.isDirectVectorFill();
if (this.isDirectVectorFill) {
- LOGGER.info("Direct pagewise vector fill collector is used to scan and collect the data");
+ LOGGER.info("Direct page-wise vector fill collector is used to scan and collect the data");
}
// initialize only if the current block is not a restructured block else the initialization
// will be taken care by RestructureBasedVectorResultCollector
@@ -118,7 +118,7 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
columnVectorInfo.dimension = queryDimensions[i];
columnVectorInfo.ordinal = queryDimensions[i].getDimension().getOrdinal();
columnVectorInfo.genericQueryType =
- executionInfo.getComlexDimensionInfoMap().get(columnVectorInfo.ordinal);
+ executionInfo.getComplexDimensionInfoMap().get(columnVectorInfo.ordinal);
allColumnInfo[queryDimensions[i].getOrdinal()] = columnVectorInfo;
} else {
ColumnVectorInfo columnVectorInfo = new ColumnVectorInfo();
@@ -164,7 +164,7 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
if (isDirectVectorFill) {
collectResultInColumnarBatchDirect(scannedResult, columnarBatch);
} else {
- int numberOfPages = scannedResult.numberOfpages();
+ int numberOfPages = scannedResult.numberOfPages();
int filteredRows = 0;
while (scannedResult.getCurrentPageCounter() < numberOfPages) {
int currentPageRowCount = scannedResult.getCurrentPageRowCount();
@@ -226,7 +226,7 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
*/
private void collectResultInColumnarBatchDirect(BlockletScannedResult scannedResult,
CarbonColumnarBatch columnarBatch) {
- int numberOfPages = scannedResult.numberOfpages();
+ int numberOfPages = scannedResult.numberOfPages();
while (scannedResult.getCurrentPageCounter() < numberOfPages) {
int currentPageRowCount = scannedResult.getCurrentPageRowCount();
if (currentPageRowCount == 0) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollector.java
index c4851c7..4bdc19d 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RawBasedResultCollector.java
@@ -67,7 +67,7 @@ public class RawBasedResultCollector extends AbstractScannedResultCollector {
*/
protected void scanAndFillData(BlockletScannedResult scannedResult, int batchSize,
List<Object[]> listBasedResult, ProjectionMeasure[] queryMeasures) {
- int numberOfPages = scannedResult.numberOfpages();
+ int numberOfPages = scannedResult.numberOfPages();
// loop will exit once the batchSize data has been read or the pages have been exhausted
while (scannedResult.getCurrentPageCounter() < numberOfPages) {
int currentPageRowCount = scannedResult.getCurrentPageRowCount();
@@ -88,7 +88,7 @@ public class RawBasedResultCollector extends AbstractScannedResultCollector {
if (batchSize > availableRows) {
batchSize = batchSize - availableRows;
} else {
- // this is done because in IUD cases actuals rows fetch can be less than batch size as
+ // this is done because in IUD cases actual rows fetch can be less than batch size as
// some of the rows could have deleted. So in those cases batchSize need to be
// re initialized with left over value
batchSize = 0;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedDictionaryResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedDictionaryResultCollector.java
index fa0b2a9..1422aac 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedDictionaryResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedDictionaryResultCollector.java
@@ -71,8 +71,8 @@ public class RestructureBasedDictionaryResultCollector extends DictionaryBasedRe
int[] surrogateResult;
byte[][] noDictionaryKeys;
byte[][] complexTypeKeyArray;
- Map<Integer, GenericQueryType> comlexDimensionInfoMap =
- executionInfo.getComlexDimensionInfoMap();
+ Map<Integer, GenericQueryType> complexDimensionInfoMap =
+ executionInfo.getComplexDimensionInfoMap();
while (scannedResult.hasNext() && rowCounter < batchSize) {
scannedResult.incrementCounter();
if (scannedResult.containsDeletedRow(scannedResult.getCurrentRowId())) {
@@ -102,7 +102,7 @@ public class RestructureBasedDictionaryResultCollector extends DictionaryBasedRe
continue;
}
fillDimensionData(scannedResult, surrogateResult, noDictionaryKeys, complexTypeKeyArray,
- comlexDimensionInfoMap, row, i, executionInfo
+ complexDimensionInfoMap, row, i, executionInfo
.getProjectionDimensions()[segmentDimensionsIdx++].getDimension().getOrdinal());
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java
index 7d70517..22e41ca 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedVectorResultCollector.java
@@ -103,7 +103,7 @@ public class RestructureBasedVectorResultCollector extends DictionaryBasedVector
@Override
public void collectResultInColumnarBatch(BlockletScannedResult scannedResult,
CarbonColumnarBatch columnarBatch) {
- int numberOfPages = scannedResult.numberOfpages();
+ int numberOfPages = scannedResult.numberOfPages();
while (scannedResult.getCurrentPageCounter() < numberOfPages) {
int currentPageRowCount = scannedResult.getCurrentPageRowCount();
if (currentPageRowCount == 0) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdBasedResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdBasedResultCollector.java
index 7a0732b..857b2f5 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdBasedResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdBasedResultCollector.java
@@ -61,7 +61,7 @@ public class RowIdBasedResultCollector extends DictionaryBasedResultCollector {
complexTypeColumnIndex = 0;
for (int i = 0; i < queryDimensions.length; i++) {
fillDimensionData(scannedResult, surrogateResult, noDictionaryKeys, complexTypeKeyArray,
- comlexDimensionInfoMap, row, i, queryDimensions[i].getDimension().getOrdinal());
+ complexDimensionInfoMap, row, i, queryDimensions[i].getDimension().getOrdinal());
}
}
row[columnCount + 2] = scannedResult.getCurrentRowId();
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdRawBasedResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdRawBasedResultCollector.java
index a5625e2..fd23bb9 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdRawBasedResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdRawBasedResultCollector.java
@@ -32,10 +32,10 @@ import org.apache.carbondata.core.stats.QueryStatisticsConstants;
/**
* It is not a collector it is just a scanned result holder.
- * most of the lines are copyied from `RawBasedResultCollector`, the difference in function is that
+ * most of the lines are copied from `RawBasedResultCollector`, the difference in function is that
* this class return all the dimensions in a ByteArrayWrapper and append blockletNo/PageId/RowId at
* end of the row.
- * This implementation refers to `RawBasedResultCollector` and `RowIdBaedResultCollector`
+ * This implementation refers to `RawBasedResultCollector` and `RowIdBasedResultCollector`
*/
@InterfaceAudience.Internal
public class RowIdRawBasedResultCollector extends AbstractScannedResultCollector {
@@ -73,7 +73,7 @@ public class RowIdRawBasedResultCollector extends AbstractScannedResultCollector
*/
protected void scanAndFillData(BlockletScannedResult scannedResult, int batchSize,
List<Object[]> listBasedResult, ProjectionMeasure[] queryMeasures) {
- int numberOfPages = scannedResult.numberOfpages();
+ int numberOfPages = scannedResult.numberOfPages();
// loop will exit once the batchSize data has been read or the pages have been exhausted
while (scannedResult.getCurrentPageCounter() < numberOfPages) {
int currentPageRowCount = scannedResult.getCurrentPageRowCount();
@@ -94,7 +94,7 @@ public class RowIdRawBasedResultCollector extends AbstractScannedResultCollector
if (batchSize > availableRows) {
batchSize = batchSize - availableRows;
} else {
- // this is done because in IUD cases actuals rows fetch can be less than batch size as
+ // this is done because in IUD cases actual rows fetch can be less than batch size as
// some of the rows could have deleted. So in those cases batchSize need to be
// re initialized with left over value
batchSize = 0;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdRestructureBasedRawResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdRestructureBasedRawResultCollector.java
index a8d99d2..592e439 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdRestructureBasedRawResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RowIdRestructureBasedRawResultCollector.java
@@ -45,7 +45,7 @@ public class RowIdRestructureBasedRawResultCollector extends RestructureBasedRaw
@Override
protected void scanAndFillData(BlockletScannedResult scannedResult, int batchSize,
List<Object[]> listBasedResult, ProjectionMeasure[] queryMeasures) {
- int numberOfPages = scannedResult.numberOfpages();
+ int numberOfPages = scannedResult.numberOfPages();
// loop will exit once the batchSize data has been read or the pages have been exhausted
while (scannedResult.getCurrentPageCounter() < numberOfPages) {
int currentPageRowCount = scannedResult.getCurrentPageRowCount();
@@ -66,7 +66,7 @@ public class RowIdRestructureBasedRawResultCollector extends RestructureBasedRaw
if (batchSize > availableRows) {
batchSize = batchSize - availableRows;
} else {
- // this is done because in IUD cases actuals rows fetch can be less than batch size as
+ // this is done because in IUD cases actual rows fetch can be less than batch size as
// some of the rows could have deleted. So in those cases batchSize need to be
// re initialized with left over value
batchSize = 0;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
index c1dda69..0623177 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
@@ -238,8 +238,8 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
* It updates dimensions and measures of query model. In few scenarios like SDK user can configure
* sort options per load, so if first load has c1 as integer column and configure as sort column
* then carbon treat that as dimension.But in second load if user change the sort option then the
- * c1 become measure as bydefault integers are measures. So this method updates the measures to
- * dimensions and vice versa as per the indexfile schema.
+ * c1 become measure as by default integers are measures. So this method updates the measures to
+ * dimensions and vice versa as per the index file schema.
*/
private void updateColumns(QueryModel queryModel, List<ColumnSchema> columnsInTable,
String filePath) throws IOException {
@@ -262,7 +262,7 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
List<ProjectionDimension> updatedDims = new ArrayList<>();
List<ProjectionMeasure> updatedMsrs = new ArrayList<>();
- // Check and update dimensions to measures if it is measure in indexfile schema
+ // Check and update dimensions to measures if it is measure in index file schema
for (ProjectionDimension dimension : dimensions) {
int index = columnsInTable.indexOf(dimension.getDimension().getColumnSchema());
if (index > -1) {
@@ -280,7 +280,7 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
}
}
- // Check and update measure to dimension if it is dimension in indexfile schema.
+ // Check and update measure to dimension if it is dimension in index file schema.
for (ProjectionMeasure measure : measures) {
int index = columnsInTable.indexOf(measure.getMeasure().getColumnSchema());
if (index > -1) {
@@ -491,9 +491,9 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
// loading the filter executor tree for filter evaluation
filterResolverIntf = queryModel.getIndexFilter().getResolver();
}
- blockExecutionInfo.setFilterExecuterTree(
- FilterUtil.getFilterExecuterTree(filterResolverIntf, segmentProperties,
- blockExecutionInfo.getComlexDimensionInfoMap(), false));
+ blockExecutionInfo.setFilterExecutorTree(
+ FilterUtil.getFilterExecutorTree(filterResolverIntf, segmentProperties,
+ blockExecutionInfo.getComplexDimensionInfoMap(), false));
}
// expression measure
List<CarbonMeasure> expressionMeasures =
@@ -501,13 +501,13 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
// setting all the dimension chunk indexes to be read from file
int numberOfElementToConsider = 0;
// list of dimensions to be projected
- Set<Integer> allProjectionListDimensionIdexes = new LinkedHashSet<>();
+ Set<Integer> allProjectionListDimensionIndexes = new LinkedHashSet<>();
// create a list of filter dimensions present in the current block
Set<CarbonDimension> currentBlockFilterDimensions =
getCurrentBlockFilterDimensions(queryProperties.complexFilterDimension, segmentProperties);
int[] dimensionChunkIndexes = QueryUtil.getDimensionChunkIndexes(projectDimensions,
segmentProperties.getDimensionOrdinalToChunkMapping(),
- currentBlockFilterDimensions, allProjectionListDimensionIdexes);
+ currentBlockFilterDimensions, allProjectionListDimensionIndexes);
ReusableDataBuffer[] dimensionBuffer = new ReusableDataBuffer[projectDimensions.size()];
for (int i = 0; i < dimensionBuffer.length; i++) {
dimensionBuffer[i] = new ReusableDataBuffer();
@@ -515,11 +515,11 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
blockExecutionInfo.setDimensionReusableDataBuffer(dimensionBuffer);
int numberOfColumnToBeReadInOneIO = Integer.parseInt(CarbonProperties.getInstance()
.getProperty(CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO,
- CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE));
+ CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULT_VALUE));
if (dimensionChunkIndexes.length > 0) {
numberOfElementToConsider = dimensionChunkIndexes[dimensionChunkIndexes.length - 1]
- == segmentProperties.getBlockTodimensionOrdinalMapping().size() - 1 ?
+ == segmentProperties.getBlockToDimensionOrdinalMapping().size() - 1 ?
dimensionChunkIndexes.length - 1 :
dimensionChunkIndexes.length;
blockExecutionInfo.setAllSelectedDimensionColumnIndexRange(
@@ -559,8 +559,8 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
}
// setting the indexes of list of dimension in projection list
blockExecutionInfo.setProjectionListDimensionIndexes(ArrayUtils.toPrimitive(
- allProjectionListDimensionIdexes
- .toArray(new Integer[allProjectionListDimensionIdexes.size()])));
+ allProjectionListDimensionIndexes
+ .toArray(new Integer[allProjectionListDimensionIndexes.size()])));
// setting the indexes of list of measures in projection list
blockExecutionInfo.setProjectionListMeasureIndexes(ArrayUtils.toPrimitive(
allProjectionListMeasureIndexes
@@ -720,7 +720,7 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
exceptionOccurred = e;
}
}
- // clear all the unsafe memory used for the given task ID only if it is neccessary to be cleared
+ // clear all the unsafe memory used for the given task ID only if it is necessary to be cleared
if (freeUnsafeMemory) {
UnsafeMemoryManager.INSTANCE
.freeMemoryAll(ThreadLocalTaskInfo.getCarbonTaskInfo().getTaskId());
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
index e3b4cd9..9755761 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
@@ -24,7 +24,7 @@ import org.apache.carbondata.core.datastore.ReusableDataBuffer;
import org.apache.carbondata.core.datastore.block.AbstractIndex;
import org.apache.carbondata.core.mutate.DeleteDeltaVo;
import org.apache.carbondata.core.scan.filter.GenericQueryType;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.model.ProjectionDimension;
import org.apache.carbondata.core.scan.model.ProjectionMeasure;
import org.apache.carbondata.core.stats.QueryStatisticsModel;
@@ -105,7 +105,7 @@ public class BlockExecutionInfo {
/**
* filter tree to execute the filter
*/
- private FilterExecuter filterExecuterTree;
+ private FilterExecutor filterExecutorTree;
/**
* whether it needs only raw byte records with out aggregation.
@@ -335,15 +335,15 @@ public class BlockExecutionInfo {
/**
* @return the filterEvaluatorTree
*/
- public FilterExecuter getFilterExecuterTree() {
- return filterExecuterTree;
+ public FilterExecutor getFilterExecutorTree() {
+ return filterExecutorTree;
}
/**
- * @param filterExecuterTree the filterEvaluatorTree to set
+ * @param filterExecutorTree the filterExecutorTree to set
*/
- public void setFilterExecuterTree(FilterExecuter filterExecuterTree) {
- this.filterExecuterTree = filterExecuterTree;
+ public void setFilterExecutorTree(FilterExecutor filterExecutorTree) {
+ this.filterExecutorTree = filterExecutorTree;
}
/**
@@ -385,7 +385,7 @@ public class BlockExecutionInfo {
/**
* @return the complexParentIndexToQueryMap
*/
- public Map<Integer, GenericQueryType> getComlexDimensionInfoMap() {
+ public Map<Integer, GenericQueryType> getComplexDimensionInfoMap() {
return complexParentIndexToQueryMap;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
index 53a9358..1b10817 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
@@ -126,12 +126,12 @@ public class QueryUtil {
*/
public static int[] getMeasureChunkIndexes(List<ProjectionMeasure> queryMeasures,
List<CarbonMeasure> expressionMeasure, Map<Integer, Integer> ordinalToBlockIndexMapping,
- Set<CarbonMeasure> filterMeasures, List<Integer> allProjectionListMeasureIdexes) {
+ Set<CarbonMeasure> filterMeasures, List<Integer> allProjectionListMeasureIndexes) {
Set<Integer> measureChunkIndex = new HashSet<Integer>();
Set<Integer> filterMeasureOrdinal = getFilterMeasureOrdinal(filterMeasures);
for (int i = 0; i < queryMeasures.size(); i++) {
Integer measureOrdinal = queryMeasures.get(i).getMeasure().getOrdinal();
- allProjectionListMeasureIdexes.add(measureOrdinal);
+ allProjectionListMeasureIndexes.add(measureOrdinal);
if (!filterMeasureOrdinal.contains(measureOrdinal)) {
measureChunkIndex.add(ordinalToBlockIndexMapping.get(measureOrdinal));
}
@@ -422,9 +422,9 @@ public class QueryUtil {
}
/**
- * In case of non transactional table just set columnuniqueid as columnName to support
- * backward compatabiity. non transactional tables column uniqueid is always equal to
- * columnname
+ * In case of non transactional table just set column unique id as columnName to support
+ * backward compatibility. non transactional tables column unique id is always equal to
+ * column name
*/
public static void updateColumnUniqueIdForNonTransactionTable(List<ColumnSchema> columnSchemas) {
for (ColumnSchema columnSchema : columnSchemas) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
index 921e34d..f90ba32 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
@@ -52,7 +52,7 @@ import org.apache.commons.lang3.ArrayUtils;
public class RestructureUtil {
/**
- * Below method will be used to get the updated query dimension updation
+ * Below method will be used to get the updated query dimensions
* means, after restructuring some dimension will be not present in older
* table blocks in that case we need to select only those dimension out of
* query dimension which is present in the current table block
@@ -168,7 +168,7 @@ public class RestructureUtil {
if (tableColumn.getDataType().isComplexType() && !(tableColumn.getDataType().getId()
== DataTypes.ARRAY_TYPE_ID)) {
if (tableColumn.getColumnId().equalsIgnoreCase(queryColumn.getColumnId()) || tableColumn
- .isColmatchBasedOnId(queryColumn)) {
+ .isColumnMatchBasedOnId(queryColumn)) {
return true;
} else {
return isColumnMatchesStruct(tableColumn, queryColumn);
@@ -180,12 +180,12 @@ public class RestructureUtil {
// In case of SDK, columnId is same as columnName therefore the following check will
// ensure that if the table columnName is same as the query columnName and the table
// columnId is the same as table columnName then it's a valid columnName to be scanned.
- || tableColumn.isColmatchBasedOnId(queryColumn));
+ || tableColumn.isColumnMatchBasedOnId(queryColumn));
}
}
/**
- * In case of Multilevel Complex column - STRUCT/STRUCTofSTRUCT, traverse all the child dimension
+ * In case of Multilevel Complex column - Struct/StructOfStruct, traverse all the child dimension
* to check column Id
*
* @param tableColumn
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/Expression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/Expression.java
index 2513b0d..24c6312 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/Expression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/Expression.java
@@ -34,7 +34,7 @@ public abstract class Expression implements Serializable {
new ArrayList<Expression>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
// When a filter expression already has the dictionary surrogate values in
- // it then we set isAlreadyResolved as true so that we donot resolve the
+ // it then we set isAlreadyResolved as true so that we do not resolve the
// filter expression in further steps.
protected boolean isAlreadyResolved;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java
index 88667eb..be3c9fa 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/RangeExpressionEvaluator.java
@@ -77,13 +77,13 @@ public class RangeExpressionEvaluator {
}
/**
- * This method evaluates is any greaterthan or less than expression can be transformed
+ * This method evaluates whether any greater than or less than expression can be transformed
* into a single RANGE filter.
*/
public void rangeExpressionEvaluatorMapBased() {
// The algorithm :
// Get all the nodes of the Expression Tree and fill it into a MAP.
- // The Map structure will be currentNode, ColumnName, LessThanOrgreaterThan, Value, ParentNode
+ // The Map structure will be currentNode, ColumnName, LessThanOrGreaterThan, Value, ParentNode
// Group the rows in MAP according to the columns and then evaluate if it can be transformed
// into a RANGE or not.
//
@@ -225,7 +225,7 @@ public class RangeExpressionEvaluator {
// if the parentNode is a ANDExpression and the current node is LessThan, GreaterThan
// then add the node into filterExpressionMap.
if ((parentNode instanceof AndExpression) && (isLessThanGreaterThanExp(currentNode)
- && eligibleForRangeExpConv(currentNode))) {
+ && eligibleForRangeExpConversion(currentNode))) {
addFilterExpressionMap(filterExpressionMap, currentNode, parentNode);
}
// In case of Or Exp we have to evaluate both the subtrees of expression separately
@@ -282,7 +282,7 @@ public class RangeExpressionEvaluator {
* @param expChild
* @return
*/
- private boolean eligibleForRangeExpConv(Expression expChild) {
+ private boolean eligibleForRangeExpConversion(Expression expChild) {
for (Expression exp : expChild.getChildren()) {
if (exp instanceof ColumnExpression) {
return ((ColumnExpression) exp).isDimension() &&
@@ -393,7 +393,7 @@ public class RangeExpressionEvaluator {
/**
* This Method Traverses the Expression Tree to find the corresponding node of the Range
* Expression. If one node of Range Expression is LessThan then a corresponding GreaterThan
- * will be choosen or vice versa.
+ * will be chosen or vice versa.
*
* @param currentNode
* @param parentNode
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/UnknownExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/UnknownExpression.java
index cbea664..9ca8a94 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/UnknownExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/UnknownExpression.java
@@ -20,14 +20,14 @@ package org.apache.carbondata.core.scan.expression;
import java.util.List;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
public abstract class UnknownExpression extends Expression {
public abstract List<ColumnExpression> getAllColumnList();
- public FilterExecuter getFilterExecuter(FilterResolverIntf filterResolverIntf,
+ public FilterExecutor getFilterExecutor(FilterResolverIntf filterResolverIntf,
SegmentProperties segmentProperties) {
return null;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/ConditionalExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/ConditionalExpression.java
index d7b940c..dd67c54 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/ConditionalExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/ConditionalExpression.java
@@ -24,7 +24,7 @@ import org.apache.carbondata.core.scan.expression.ExpressionResult;
public interface ConditionalExpression {
- // Will get the column informations involved in the expressions by
+ // Will get the column information involved in the expressions by
// traversing the tree
List<ColumnExpression> getColumnList();
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpression.java
index fb5e9e4..fe0d5fd 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpression.java
@@ -59,7 +59,7 @@ public class EqualToExpression extends BinaryConditionalExpression {
}
return elRes;
}
- //default implementation if the data types are different for the resultsets
+ //default implementation if the data types are different for the result sets
if (elRes.getDataType() != erRes.getDataType()) {
if (elRes.getDataType().getPrecedenceOrder() < erRes.getDataType().getPrecedenceOrder()) {
val2 = elRes;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/InExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/InExpression.java
index 390ff28..7a1607c 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/InExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/InExpression.java
@@ -41,16 +41,16 @@ public class InExpression extends BinaryConditionalExpression {
@Override
public ExpressionResult evaluate(RowIntf value)
throws FilterUnsupportedException, FilterIllegalMemberException {
- ExpressionResult leftRsult = left.evaluate(value);
+ ExpressionResult leftResult = left.evaluate(value);
if (setOfExprResult == null) {
- ExpressionResult rightRsult = right.evaluate(value);
+ ExpressionResult rightResult = right.evaluate(value);
ExpressionResult val = null;
setOfExprResult = new HashSet<ExpressionResult>(10);
- for (ExpressionResult expressionResVal : rightRsult.getList()) {
- if (expressionResVal.getDataType().getPrecedenceOrder() < leftRsult.getDataType()
+ for (ExpressionResult expressionResVal : rightResult.getList()) {
+ if (expressionResVal.getDataType().getPrecedenceOrder() < leftResult.getDataType()
.getPrecedenceOrder()) {
- val = leftRsult;
+ val = leftResult;
} else {
val = expressionResVal;
}
@@ -88,12 +88,12 @@ public class InExpression extends BinaryConditionalExpression {
// Left check will cover both the cases when left and right is null therefore no need
// for a check on the right result.
// Example: (null==null) -> Left null return false, (1==null) would automatically be false.
- if (leftRsult.isNull()) {
- leftRsult.set(DataTypes.BOOLEAN, false);
+ if (leftResult.isNull()) {
+ leftResult.set(DataTypes.BOOLEAN, false);
} else {
- leftRsult.set(DataTypes.BOOLEAN, setOfExprResult.contains(leftRsult));
+ leftResult.set(DataTypes.BOOLEAN, setOfExprResult.contains(leftResult));
}
- return leftRsult;
+ return leftResult;
}
@Override
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpression.java
index 69c7cc5..f393754 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpression.java
@@ -56,7 +56,7 @@ public class NotEqualsExpression extends BinaryConditionalExpression {
}
return elRes;
}
- //default implementation if the data types are different for the resultsets
+ //default implementation if the data types are different for the result sets
if (elRes.getDataType() != erRes.getDataType()) {
if (elRes.getDataType().getPrecedenceOrder() < erRes.getDataType().getPrecedenceOrder()) {
val1 = erRes;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotInExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotInExpression.java
index 6bd09a5..1ce7e98 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotInExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotInExpression.java
@@ -51,27 +51,27 @@ public class NotInExpression extends BinaryConditionalExpression {
return nullValuePresent;
}
- ExpressionResult leftRsult = left.evaluate(value);
- if (leftRsult.isNull()) {
- leftRsult.set(DataTypes.BOOLEAN, false);
- return leftRsult;
+ ExpressionResult leftResult = left.evaluate(value);
+ if (leftResult.isNull()) {
+ leftResult.set(DataTypes.BOOLEAN, false);
+ return leftResult;
}
if (setOfExprResult == null) {
ExpressionResult val = null;
- ExpressionResult rightRsult = right.evaluate(value);
+ ExpressionResult rightResult = right.evaluate(value);
setOfExprResult = new HashSet<ExpressionResult>(10);
- for (ExpressionResult exprResVal : rightRsult.getList()) {
+ for (ExpressionResult exprResVal : rightResult.getList()) {
if (exprResVal.isNull()) {
nullValuePresent = new ExpressionResult(DataTypes.BOOLEAN, false);
- leftRsult.set(DataTypes.BOOLEAN, false);
- return leftRsult;
+ leftResult.set(DataTypes.BOOLEAN, false);
+ return leftResult;
}
- if (exprResVal.getDataType().getPrecedenceOrder() < leftRsult.getDataType()
+ if (exprResVal.getDataType().getPrecedenceOrder() < leftResult.getDataType()
.getPrecedenceOrder()) {
- val = leftRsult;
+ val = leftResult;
} else {
val = exprResVal;
}
@@ -103,8 +103,8 @@ public class NotInExpression extends BinaryConditionalExpression {
}
}
- leftRsult.set(DataTypes.BOOLEAN, !setOfExprResult.contains(leftRsult));
- return leftRsult;
+ leftResult.set(DataTypes.BOOLEAN, !setOfExprResult.contains(leftResult));
+ return leftResult;
}
@Override
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/logical/BinaryLogicalExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/logical/BinaryLogicalExpression.java
index ecbb8d9..06d8150 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/logical/BinaryLogicalExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/logical/BinaryLogicalExpression.java
@@ -48,7 +48,7 @@ public abstract class BinaryLogicalExpression extends BinaryExpression {
return listOfExp;
}
- // Will get the column informations involved in the expressions by
+ // Will get the column information involved in the expressions by
// traversing the tree
public List<ColumnExpression> getColumnList() {
// TODO
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExecutorUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExecutorUtil.java
index efd219f..3e21640 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExecutorUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExecutorUtil.java
@@ -27,7 +27,7 @@ import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
import org.apache.carbondata.core.scan.filter.executer.FilterBitSetUpdater;
-import org.apache.carbondata.core.scan.filter.executer.MeasureColumnExecuterFilterInfo;
+import org.apache.carbondata.core.scan.filter.executer.MeasureColumnExecutorFilterInfo;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
import org.apache.carbondata.core.util.DataTypeUtil;
@@ -50,19 +50,19 @@ public class FilterExecutorUtil {
*
* @param page
* @param bitSet
- * @param measureColumnExecuterFilterInfo
+ * @param measureColumnExecutorFilterInfo
* @param measureColumnResolvedFilterInfo
* @param filterBitSetUpdater
*/
public static void executeIncludeExcludeFilterForMeasure(ColumnPage page, BitSet bitSet,
- MeasureColumnExecuterFilterInfo measureColumnExecuterFilterInfo,
+ MeasureColumnExecutorFilterInfo measureColumnExecutorFilterInfo,
MeasureColumnResolvedFilterInfo measureColumnResolvedFilterInfo,
FilterBitSetUpdater filterBitSetUpdater) {
final CarbonMeasure measure = measureColumnResolvedFilterInfo.getMeasure();
final DataType dataType = FilterUtil.getMeasureDataType(measureColumnResolvedFilterInfo);
int numberOfRows = page.getPageSize();
BitSet nullBitSet = page.getNullBits();
- Object[] filterKeys = measureColumnExecuterFilterInfo.getFilterKeys();
+ Object[] filterKeys = measureColumnExecutorFilterInfo.getFilterKeys();
// to handle the null value
for (int i = 0; i < filterKeys.length; i++) {
if (filterKeys[i] == null) {
@@ -71,7 +71,7 @@ public class FilterExecutorUtil {
}
}
}
- AbstractCollection filterSet = measureColumnExecuterFilterInfo.getFilterSet();
+ AbstractCollection filterSet = measureColumnExecutorFilterInfo.getFilterSet();
if (dataType == DataTypes.BYTE) {
ByteOpenHashSet byteOpenHashSet = (ByteOpenHashSet) filterSet;
for (int i = 0; i < numberOfRows; i++) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
index e5405ce..0c1ab05 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
@@ -36,7 +36,7 @@ import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedExc
import org.apache.carbondata.core.scan.expression.logical.AndExpression;
import org.apache.carbondata.core.scan.expression.logical.OrExpression;
import org.apache.carbondata.core.scan.expression.logical.TrueExpression;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.executer.ImplicitColumnFilterExecutor;
import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
import org.apache.carbondata.core.scan.filter.resolver.ConditionalFilterResolverImpl;
@@ -59,26 +59,26 @@ public class FilterExpressionProcessor implements FilterProcessor {
* filter expression tree which is been passed in Expression instance.
*
* @param expressionTree , filter expression tree
- * @param tableIdentifier ,contains carbon store informations
+ * @param tableIdentifier ,contains carbon store information
* @return a filter resolver tree
*/
public FilterResolverIntf getFilterResolver(Expression expressionTree,
AbsoluteTableIdentifier tableIdentifier) throws FilterUnsupportedException {
if (null != expressionTree && null != tableIdentifier) {
- return getFilterResolvertree(expressionTree, tableIdentifier);
+ return getFilterResolverTree(expressionTree, tableIdentifier);
}
return null;
}
/**
* API will return a filter resolver instance which will be used by
- * executers to evaluate or execute the filters.
+ * executors to evaluate or execute the filters.
*
* @param expressionTree , resolver tree which will hold the resolver tree based on
* filter expression.
* @return FilterResolverIntf type.
*/
- private FilterResolverIntf getFilterResolvertree(Expression expressionTree,
+ private FilterResolverIntf getFilterResolverTree(Expression expressionTree,
AbsoluteTableIdentifier tableIdentifier) throws FilterUnsupportedException {
FilterResolverIntf filterEvaluatorTree =
createFilterResolverTree(expressionTree, tableIdentifier);
@@ -311,14 +311,14 @@ public class FilterExpressionProcessor implements FilterProcessor {
return new RowLevelFilterResolverImpl(expression, false, false, tableIdentifier);
}
- public static boolean isScanRequired(FilterExecuter filterExecuter, byte[][] maxValue,
+ public static boolean isScanRequired(FilterExecutor filterExecutor, byte[][] maxValue,
byte[][] minValue, boolean[] isMinMaxSet) {
- if (filterExecuter instanceof ImplicitColumnFilterExecutor) {
- return ((ImplicitColumnFilterExecutor) filterExecuter)
+ if (filterExecutor instanceof ImplicitColumnFilterExecutor) {
+ return ((ImplicitColumnFilterExecutor) filterExecutor)
.isFilterValuesPresentInAbstractIndex(maxValue, minValue, isMinMaxSet);
} else {
// otherwise decide based on min/max value
- BitSet bitSet = filterExecuter.isScanRequired(maxValue, minValue, isMinMaxSet);
+ BitSet bitSet = filterExecutor.isScanRequired(maxValue, minValue, isMinMaxSet);
return !bitSet.isEmpty();
}
}
@@ -353,12 +353,12 @@ public class FilterExpressionProcessor implements FilterProcessor {
}
/**
- * Change UnknownReslover to TrueExpression Reslover.
+ * Change UnknownResolver to TrueExpression Resolver.
*
* @param tableIdentifier
* @return
*/
- public FilterResolverIntf changeUnknownResloverToTrue(AbsoluteTableIdentifier tableIdentifier) {
+ public FilterResolverIntf changeUnknownResolverToTrue(AbsoluteTableIdentifier tableIdentifier) {
return getFilterResolverBasedOnExpressionType(ExpressionType.TRUE, false,
new TrueExpression(null), tableIdentifier, new TrueExpression(null));
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterProcessor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterProcessor.java
index 9ef34d9..f6915d7 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterProcessor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterProcessor.java
@@ -29,7 +29,7 @@ public interface FilterProcessor {
* expression tree which is been passed.
*
* @param expressionTree , filter expression tree
- * @param tableIdentifier ,contains carbon store informations.
+ * @param tableIdentifier ,contains carbon store information.
* @return
* @throws FilterUnsupportedException
*/
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index 6f121ca..3053d91 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -56,23 +56,23 @@ import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedExc
import org.apache.carbondata.core.scan.expression.logical.AndExpression;
import org.apache.carbondata.core.scan.expression.logical.OrExpression;
import org.apache.carbondata.core.scan.expression.logical.TrueExpression;
-import org.apache.carbondata.core.scan.filter.executer.AndFilterExecuterImpl;
-import org.apache.carbondata.core.scan.filter.executer.DimColumnExecuterFilterInfo;
-import org.apache.carbondata.core.scan.filter.executer.ExcludeFilterExecuterImpl;
+import org.apache.carbondata.core.scan.filter.executer.AndFilterExecutorImpl;
+import org.apache.carbondata.core.scan.filter.executer.DimColumnExecutorFilterInfo;
+import org.apache.carbondata.core.scan.filter.executer.ExcludeFilterExecutorImpl;
import org.apache.carbondata.core.scan.filter.executer.FalseFilterExecutor;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.executer.ImplicitIncludeFilterExecutorImpl;
-import org.apache.carbondata.core.scan.filter.executer.IncludeFilterExecuterImpl;
-import org.apache.carbondata.core.scan.filter.executer.MeasureColumnExecuterFilterInfo;
-import org.apache.carbondata.core.scan.filter.executer.OrFilterExecuterImpl;
-import org.apache.carbondata.core.scan.filter.executer.RangeValueFilterExecuterImpl;
+import org.apache.carbondata.core.scan.filter.executer.IncludeFilterExecutorImpl;
+import org.apache.carbondata.core.scan.filter.executer.MeasureColumnExecutorFilterInfo;
+import org.apache.carbondata.core.scan.filter.executer.OrFilterExecutorImpl;
+import org.apache.carbondata.core.scan.filter.executer.RangeValueFilterExecutorImpl;
import org.apache.carbondata.core.scan.filter.executer.RestructureExcludeFilterExecutorImpl;
import org.apache.carbondata.core.scan.filter.executer.RestructureIncludeFilterExecutorImpl;
-import org.apache.carbondata.core.scan.filter.executer.RowLevelFilterExecuterImpl;
-import org.apache.carbondata.core.scan.filter.executer.RowLevelRangeTypeExecuterFactory;
+import org.apache.carbondata.core.scan.filter.executer.RowLevelFilterExecutorImpl;
+import org.apache.carbondata.core.scan.filter.executer.RowLevelRangeTypeExecutorFactory;
import org.apache.carbondata.core.scan.filter.executer.TrueFilterExecutor;
import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.intf.RowImpl;
import org.apache.carbondata.core.scan.filter.intf.RowIntf;
import org.apache.carbondata.core.scan.filter.resolver.ConditionalFilterResolverImpl;
@@ -104,8 +104,8 @@ public final class FilterUtil {
/**
* Pattern used : Visitor Pattern
- * Method will create filter executer tree based on the filter resolved tree,
- * in this algorithm based on the resolver instance the executers will be visited
+ * Method will create filter executor tree based on the filter resolved tree,
+ * in this algorithm based on the resolver instance the executors will be visited
* and the resolved surrogates will be converted to keys
*
* @param filterExpressionResolverTree
@@ -113,22 +113,22 @@ public final class FilterUtil {
* @param complexDimensionInfoMap
* @param minMaxCacheColumns
* @param isStreamDataFile: whether create filter executer tree for stream data files
- * @return FilterExecuter instance
+ * @return FilterExecutor instance
*
*/
- private static FilterExecuter createFilterExecuterTree(
+ private static FilterExecutor createFilterExecutorTree(
FilterResolverIntf filterExpressionResolverTree, SegmentProperties segmentProperties,
Map<Integer, GenericQueryType> complexDimensionInfoMap,
List<CarbonColumn> minMaxCacheColumns, boolean isStreamDataFile) {
- FilterExecuterType filterExecuterType = filterExpressionResolverTree.getFilterExecuterType();
- if (null != filterExecuterType) {
- switch (filterExecuterType) {
+ FilterExecutorType filterExecutorType = filterExpressionResolverTree.getFilterExecutorType();
+ if (null != filterExecutorType) {
+ switch (filterExecutorType) {
case INCLUDE:
if (null != filterExpressionResolverTree.getDimColResolvedFilterInfo()
&& null != filterExpressionResolverTree.getDimColResolvedFilterInfo()
.getFilterValues() && filterExpressionResolverTree.getDimColResolvedFilterInfo()
.getFilterValues().isOptimized()) {
- return getExcludeFilterExecuter(
+ return getExcludeFilterExecutor(
filterExpressionResolverTree.getDimColResolvedFilterInfo(),
filterExpressionResolverTree.getMsrColResolvedFilterInfo(), segmentProperties);
}
@@ -137,24 +137,24 @@ public final class FilterUtil {
segmentProperties, minMaxCacheColumns, isStreamDataFile)) {
return new TrueFilterExecutor();
}
- return getIncludeFilterExecuter(
+ return getIncludeFilterExecutor(
filterExpressionResolverTree.getDimColResolvedFilterInfo(),
filterExpressionResolverTree.getMsrColResolvedFilterInfo(), segmentProperties);
case EXCLUDE:
- return getExcludeFilterExecuter(
+ return getExcludeFilterExecutor(
filterExpressionResolverTree.getDimColResolvedFilterInfo(),
filterExpressionResolverTree.getMsrColResolvedFilterInfo(), segmentProperties);
case OR:
- return new OrFilterExecuterImpl(
- createFilterExecuterTree(filterExpressionResolverTree.getLeft(), segmentProperties,
+ return new OrFilterExecutorImpl(
+ createFilterExecutorTree(filterExpressionResolverTree.getLeft(), segmentProperties,
complexDimensionInfoMap, minMaxCacheColumns, isStreamDataFile),
- createFilterExecuterTree(filterExpressionResolverTree.getRight(), segmentProperties,
+ createFilterExecutorTree(filterExpressionResolverTree.getRight(), segmentProperties,
complexDimensionInfoMap, minMaxCacheColumns, isStreamDataFile));
case AND:
- return new AndFilterExecuterImpl(
- createFilterExecuterTree(filterExpressionResolverTree.getLeft(), segmentProperties,
+ return new AndFilterExecutorImpl(
+ createFilterExecutorTree(filterExpressionResolverTree.getLeft(), segmentProperties,
complexDimensionInfoMap, minMaxCacheColumns, isStreamDataFile),
- createFilterExecuterTree(filterExpressionResolverTree.getRight(), segmentProperties,
+ createFilterExecutorTree(filterExpressionResolverTree.getRight(), segmentProperties,
complexDimensionInfoMap, minMaxCacheColumns, isStreamDataFile));
case ROWLEVEL_LESSTHAN:
case ROWLEVEL_LESSTHAN_EQUALTO:
@@ -165,12 +165,12 @@ public final class FilterUtil {
(RowLevelRangeFilterResolverImpl) filterExpressionResolverTree;
if (checkIfCurrentNodeToBeReplacedWithTrueFilterExpression(
rowLevelRangeFilterResolver.getDimColEvaluatorInfoList(),
- rowLevelRangeFilterResolver.getMsrColEvalutorInfoList(), segmentProperties,
+ rowLevelRangeFilterResolver.getMsrColEvaluatorInfoList(), segmentProperties,
minMaxCacheColumns, isStreamDataFile)) {
return new TrueFilterExecutor();
}
- return RowLevelRangeTypeExecuterFactory
- .getRowLevelRangeTypeExecuter(filterExecuterType, filterExpressionResolverTree,
+ return RowLevelRangeTypeExecutorFactory
+ .getRowLevelRangeTypeExecutor(filterExecutorType, filterExpressionResolverTree,
segmentProperties);
case RANGE:
// return true filter expression if filter column min/max is not cached in driver
@@ -178,7 +178,7 @@ public final class FilterUtil {
segmentProperties, minMaxCacheColumns, isStreamDataFile)) {
return new TrueFilterExecutor();
}
- return new RangeValueFilterExecuterImpl(
+ return new RangeValueFilterExecutorImpl(
filterExpressionResolverTree.getDimColResolvedFilterInfo(),
filterExpressionResolverTree.getFilterExpression(),
((ConditionalFilterResolverImpl) filterExpressionResolverTree)
@@ -190,14 +190,14 @@ public final class FilterUtil {
case ROWLEVEL:
default:
if (filterExpressionResolverTree.getFilterExpression() instanceof UnknownExpression) {
- FilterExecuter filterExecuter =
+ FilterExecutor filterExecutor =
((UnknownExpression) filterExpressionResolverTree.getFilterExpression())
- .getFilterExecuter(filterExpressionResolverTree, segmentProperties);
- if (filterExecuter != null) {
- return filterExecuter;
+ .getFilterExecutor(filterExpressionResolverTree, segmentProperties);
+ if (filterExecutor != null) {
+ return filterExecutor;
}
}
- return new RowLevelFilterExecuterImpl(
+ return new RowLevelFilterExecutorImpl(
((RowLevelFilterResolverImpl) filterExpressionResolverTree)
.getDimColEvaluatorInfoList(),
((RowLevelFilterResolverImpl) filterExpressionResolverTree)
@@ -208,7 +208,7 @@ public final class FilterUtil {
}
}
- return new RowLevelFilterExecuterImpl(
+ return new RowLevelFilterExecutorImpl(
((RowLevelFilterResolverImpl) filterExpressionResolverTree).getDimColEvaluatorInfoList(),
((RowLevelFilterResolverImpl) filterExpressionResolverTree).getMsrColEvalutorInfoList(),
((RowLevelFilterResolverImpl) filterExpressionResolverTree).getFilterExpresion(),
@@ -218,13 +218,13 @@ public final class FilterUtil {
}
/**
- * It gives filter executer based on columnar or column group
+ * It gives filter executor based on columnar or column group
*
* @param dimColResolvedFilterInfo
* @param segmentProperties
* @return
*/
- private static FilterExecuter getIncludeFilterExecuter(
+ private static FilterExecutor getIncludeFilterExecutor(
DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
MeasureColumnResolvedFilterInfo msrColResolvedFilterInfo,
SegmentProperties segmentProperties) {
@@ -238,7 +238,7 @@ public final class FilterUtil {
msrColResolvedFilterInfoCopyObject.setMeasure(measuresFromCurrentBlock);
msrColResolvedFilterInfoCopyObject.setColumnIndex(measuresFromCurrentBlock.getOrdinal());
msrColResolvedFilterInfoCopyObject.setType(measuresFromCurrentBlock.getDataType());
- return new IncludeFilterExecuterImpl(null, msrColResolvedFilterInfoCopyObject,
+ return new IncludeFilterExecutorImpl(null, msrColResolvedFilterInfoCopyObject,
segmentProperties, true);
} else {
return new RestructureIncludeFilterExecutorImpl(dimColResolvedFilterInfo,
@@ -257,7 +257,7 @@ public final class FilterUtil {
dimColResolvedFilterInfo.getCopyObject();
dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
- return new IncludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
+ return new IncludeFilterExecutorImpl(dimColResolvedFilterInfoCopyObject, null,
segmentProperties, false);
} else {
return new RestructureIncludeFilterExecutorImpl(dimColResolvedFilterInfo,
@@ -409,13 +409,13 @@ public final class FilterUtil {
}
/**
- * It gives filter executer based on columnar or column group
+ * It gives filter executor based on columnar or column group
*
* @param dimColResolvedFilterInfo
* @param segmentProperties
* @return
*/
- private static FilterExecuter getExcludeFilterExecuter(
+ private static FilterExecutor getExcludeFilterExecutor(
DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
MeasureColumnResolvedFilterInfo msrColResolvedFilterInfo,
SegmentProperties segmentProperties) {
@@ -430,7 +430,7 @@ public final class FilterUtil {
msrColResolvedFilterInfoCopyObject.setMeasure(measuresFromCurrentBlock);
msrColResolvedFilterInfoCopyObject.setColumnIndex(measuresFromCurrentBlock.getOrdinal());
msrColResolvedFilterInfoCopyObject.setType(measuresFromCurrentBlock.getDataType());
- return new ExcludeFilterExecuterImpl(null, msrColResolvedFilterInfoCopyObject,
+ return new ExcludeFilterExecutorImpl(null, msrColResolvedFilterInfoCopyObject,
segmentProperties, true);
} else {
return new RestructureExcludeFilterExecutorImpl(dimColResolvedFilterInfo,
@@ -445,7 +445,7 @@ public final class FilterUtil {
dimColResolvedFilterInfo.getCopyObject();
dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
- return new ExcludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
+ return new ExcludeFilterExecutorImpl(dimColResolvedFilterInfoCopyObject, null,
segmentProperties, false);
} else {
return new RestructureExcludeFilterExecutorImpl(dimColResolvedFilterInfo,
@@ -567,7 +567,7 @@ public final class FilterUtil {
throw new FilterUnsupportedException("Unsupported Filter condition: " + result, ex);
}
- java.util.Comparator<byte[]> filterNoDictValueComaparator = new java.util.Comparator<byte[]>() {
+ java.util.Comparator<byte[]> filterNoDictValueComparator = new java.util.Comparator<byte[]>() {
@Override
public int compare(byte[] filterMember1, byte[] filterMember2) {
@@ -576,7 +576,7 @@ public final class FilterUtil {
}
};
- Collections.sort(filterValuesList, filterNoDictValueComaparator);
+ Collections.sort(filterValuesList, filterNoDictValueComparator);
ColumnFilterInfo columnFilterInfo = null;
if (filterValuesList.size() > 0) {
columnFilterInfo = new ColumnFilterInfo();
@@ -618,9 +618,9 @@ public final class FilterUtil {
throw new FilterUnsupportedException("Unsupported Filter condition: " + result, ex);
}
- SerializableComparator filterMeasureComaparator =
+ SerializableComparator filterMeasureComparator =
Comparator.getComparatorByDataTypeForMeasure(dataType);
- Collections.sort(filterValuesList, filterMeasureComaparator);
+ Collections.sort(filterValuesList, filterMeasureComparator);
ColumnFilterInfo columnFilterInfo = null;
if (filterValuesList.size() > 0) {
columnFilterInfo = new ColumnFilterInfo();
@@ -665,14 +665,14 @@ public final class FilterUtil {
boolean isExclude, int[] keys, List<byte[]> filterValuesList,
int keyOrdinalOfDimensionFromCurrentBlock) {
if (null != columnFilterInfo) {
- List<Integer> listOfsurrogates = null;
+ List<Integer> listOfSurrogates = null;
if (!isExclude && columnFilterInfo.isIncludeFilter()) {
- listOfsurrogates = columnFilterInfo.getFilterList();
+ listOfSurrogates = columnFilterInfo.getFilterList();
} else if (isExclude || !columnFilterInfo.isIncludeFilter()) {
- listOfsurrogates = columnFilterInfo.getExcludeFilterList();
+ listOfSurrogates = columnFilterInfo.getExcludeFilterList();
}
- if (null != listOfsurrogates) {
- for (Integer surrogate : listOfsurrogates) {
+ if (null != listOfSurrogates) {
+ for (Integer surrogate : listOfSurrogates) {
keys[keyOrdinalOfDimensionFromCurrentBlock] = surrogate;
filterValuesList.add(ByteUtil.convertIntToBytes(surrogate));
}
@@ -686,14 +686,14 @@ public final class FilterUtil {
private static byte[][] getFilterValueInBytesForDictRange(ColumnFilterInfo columnFilterInfo,
int[] keys, List<byte[]> filterValuesList, int keyOrdinalOfDimensionFromCurrentBlock) {
if (null != columnFilterInfo) {
- List<Integer> listOfsurrogates = columnFilterInfo.getFilterList();
- if (listOfsurrogates == null || listOfsurrogates.size() > 1) {
+ List<Integer> listOfSurrogates = columnFilterInfo.getFilterList();
+ if (listOfSurrogates == null || listOfSurrogates.size() > 1) {
throw new RuntimeException(
"Filter values cannot be null in case of range in dictionary include");
}
// Here we only get the first column as there can be only one range column.
- keys[keyOrdinalOfDimensionFromCurrentBlock] = listOfsurrogates.get(0);
- filterValuesList.add(ByteUtil.convertIntToBytes(listOfsurrogates.get(0)));
+ keys[keyOrdinalOfDimensionFromCurrentBlock] = listOfSurrogates.get(0);
+ filterValuesList.add(ByteUtil.convertIntToBytes(listOfSurrogates.get(0)));
}
return filterValuesList.toArray(new byte[filterValuesList.size()][]);
}
@@ -723,7 +723,7 @@ public final class FilterUtil {
/**
* Below method will be used to convert the filter surrogate keys
- * to mdkey
+ * to MDKey
*
* @param columnFilterInfo
* @param carbonDimension
@@ -753,31 +753,31 @@ public final class FilterUtil {
}
/**
- * API will create an filter executer tree based on the filter resolver
+ * API will create an filter executor tree based on the filter resolver
*
* @param filterExpressionResolverTree
* @param segmentProperties
* @return
*/
- public static FilterExecuter getFilterExecuterTree(
+ public static FilterExecutor getFilterExecutorTree(
FilterResolverIntf filterExpressionResolverTree, SegmentProperties segmentProperties,
Map<Integer, GenericQueryType> complexDimensionInfoMap, boolean isStreamDataFile) {
- return getFilterExecuterTree(filterExpressionResolverTree, segmentProperties,
+ return getFilterExecutorTree(filterExpressionResolverTree, segmentProperties,
complexDimensionInfoMap, null, isStreamDataFile);
}
/**
- * API will create an filter executer tree based on the filter resolver and minMaxColumns
+ * API will create an filter executor tree based on the filter resolver and minMaxColumns
*
* @param filterExpressionResolverTree
* @param segmentProperties
* @return
*/
- public static FilterExecuter getFilterExecuterTree(
+ public static FilterExecutor getFilterExecutorTree(
FilterResolverIntf filterExpressionResolverTree, SegmentProperties segmentProperties,
Map<Integer, GenericQueryType> complexDimensionInfoMap,
List<CarbonColumn> minMaxCacheColumns, boolean isStreamDataFile) {
- return createFilterExecuterTree(filterExpressionResolverTree, segmentProperties,
+ return createFilterExecutorTree(filterExpressionResolverTree, segmentProperties,
complexDimensionInfoMap, minMaxCacheColumns, isStreamDataFile);
}
@@ -787,12 +787,12 @@ public final class FilterUtil {
* @param filterValues
* @param segmentProperties
* @param dimension
- * @param dimColumnExecuterInfo
+ * @param dimColumnExecutorInfo
*/
public static void prepareKeysFromSurrogates(ColumnFilterInfo filterValues,
SegmentProperties segmentProperties, CarbonDimension dimension,
- DimColumnExecuterFilterInfo dimColumnExecuterInfo, CarbonMeasure measures,
- MeasureColumnExecuterFilterInfo msrColumnExecuterInfo) {
+ DimColumnExecutorFilterInfo dimColumnExecutorInfo, CarbonMeasure measures,
+ MeasureColumnExecutorFilterInfo msrColumnExecutorInfo) {
if (null != measures) {
DataType filterColumnDataType = DataTypes.valueOf(measures.getDataType().getId());
DataTypeConverterImpl converter = new DataTypeConverterImpl();
@@ -805,18 +805,18 @@ public final class FilterUtil {
converter);
}
}
- msrColumnExecuterInfo.setFilterKeys(keysBasedOnFilter, filterColumnDataType);
+ msrColumnExecutorInfo.setFilterKeys(keysBasedOnFilter, filterColumnDataType);
} else {
if (filterValues == null) {
- dimColumnExecuterInfo.setFilterKeys(new byte[0][]);
+ dimColumnExecutorInfo.setFilterKeys(new byte[0][]);
} else {
byte[][] keysBasedOnFilter =
getKeyArray(filterValues, dimension, segmentProperties, false, false);
if (!filterValues.isIncludeFilter() || filterValues.isOptimized()) {
- dimColumnExecuterInfo.setExcludeFilterKeys(
+ dimColumnExecutorInfo.setExcludeFilterKeys(
getKeyArray(filterValues, dimension, segmentProperties, true, false));
}
- dimColumnExecuterInfo.setFilterKeys(keysBasedOnFilter);
+ dimColumnExecutorInfo.setFilterKeys(keysBasedOnFilter);
}
}
}
@@ -961,7 +961,7 @@ public final class FilterUtil {
}
}
- public static void updateIndexOfColumnExpression(Expression exp, int dimOridnalMax) {
+ public static void updateIndexOfColumnExpression(Expression exp, int dimOrdinalMax) {
// if expression is null, not require to update index.
if (exp == null) {
return;
@@ -973,14 +973,14 @@ public final class FilterUtil {
if (column.isDimension()) {
ce.setColIndex(column.getOrdinal());
} else {
- ce.setColIndex(dimOridnalMax + column.getOrdinal());
+ ce.setColIndex(dimOrdinalMax + column.getOrdinal());
}
}
} else {
if (exp.getChildren().size() > 0) {
List<Expression> children = exp.getChildren();
for (int i = 0; i < children.size(); i++) {
- updateIndexOfColumnExpression(children.get(i), dimOridnalMax);
+ updateIndexOfColumnExpression(children.get(i), dimOrdinalMax);
}
}
}
@@ -1044,7 +1044,7 @@ public final class FilterUtil {
}
/**
- * This methdd will check if ImplictFilter is present or not
+ * This method will check if ImplicitFilter is present or not
* if it is present then return that ImplicitFilterExpression
*
* @param expression
@@ -1062,7 +1062,7 @@ public final class FilterUtil {
if (childExpression instanceof ColumnExpression && ((ColumnExpression) childExpression)
.getColumnName().equalsIgnoreCase(CarbonCommonConstants.POSITION_ID)) {
// Remove the right expression node and point the expression to left node expression
- // if 1st children is implict column positionID then 2nd children will be
+ // if 1st children is implicit column positionID then 2nd children will be
// implicit filter list
return children.get(1);
}
@@ -1130,13 +1130,13 @@ public final class FilterUtil {
* @return sorted encoded filter values
*/
private static byte[][] getSortedEncodedFilters(List<byte[]> encodedFilters) {
- java.util.Comparator<byte[]> filterNoDictValueComaparator = new java.util.Comparator<byte[]>() {
+ java.util.Comparator<byte[]> filterNoDictValueComparator = new java.util.Comparator<byte[]>() {
@Override
public int compare(byte[] filterMember1, byte[] filterMember2) {
return ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterMember1, filterMember2);
}
};
- Collections.sort(encodedFilters, filterNoDictValueComaparator);
+ Collections.sort(encodedFilters, filterNoDictValueComparator);
return encodedFilters.toArray(new byte[encodedFilters.size()][]);
}
@@ -1165,9 +1165,9 @@ public final class FilterUtil {
Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
row.setValues(new Object[] { DataTypeUtil.getDataBasedOnDataType(stringValue,
columnExpression.getCarbonColumn().getDataType()) });
- Boolean rslt = expression.evaluate(row).getBoolean();
- if (null != rslt) {
- if (rslt) {
+ Boolean result = expression.evaluate(row).getBoolean();
+ if (null != result) {
+ if (result) {
includeFilterBitSet.set(i);
}
}
@@ -1217,7 +1217,7 @@ public final class FilterUtil {
/**
* Below method will be used to get filter executor instance for range filters
- * when local dictonary is present for in blocklet
+ * when local dictionary is present for in blocklet
* @param rawColumnChunk
* raw column chunk
* @param exp
@@ -1226,7 +1226,7 @@ public final class FilterUtil {
* is data was already sorted
* @return
*/
- public static FilterExecuter getFilterExecutorForRangeFilters(
+ public static FilterExecutor getFilterExecutorForRangeFilters(
DimensionRawColumnChunk rawColumnChunk, Expression exp, boolean isNaturalSorted) {
BitSet includeDictionaryValues;
try {
@@ -1241,13 +1241,13 @@ public final class FilterUtil {
byte[][] encodedFilterValues = FilterUtil
.getEncodedFilterValuesForRange(includeDictionaryValues,
rawColumnChunk.getLocalDictionary(), isExclude);
- FilterExecuter filterExecuter;
+ FilterExecutor filterExecutor;
if (!isExclude) {
- filterExecuter = new IncludeFilterExecuterImpl(encodedFilterValues, isNaturalSorted);
+ filterExecutor = new IncludeFilterExecutorImpl(encodedFilterValues, isNaturalSorted);
} else {
- filterExecuter = new ExcludeFilterExecuterImpl(encodedFilterValues, isNaturalSorted);
+ filterExecutor = new ExcludeFilterExecutorImpl(encodedFilterValues, isNaturalSorted);
}
- return filterExecuter;
+ return filterExecutor;
}
/**
@@ -1268,7 +1268,7 @@ public final class FilterUtil {
Object value =
DataTypeUtil.getDataBasedOnDataTypeForNoDictionaryColumn(minMaxBytes, dataType);
// filter value should be in range of max and min value i.e
- // max>filtervalue>min
+ // max>filterValue>min
// so filter-max should be negative
Object data = DataTypeUtil.getDataBasedOnDataTypeForNoDictionaryColumn(filterValue, dataType);
SerializableComparator comparator = Comparator.getComparator(dataType);
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecutorImpl.java
similarity index 72%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecutorImpl.java
index bbacc71..cca4231 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/AndFilterExecutorImpl.java
@@ -25,24 +25,24 @@ import org.apache.carbondata.core.scan.filter.intf.RowIntf;
import org.apache.carbondata.core.scan.processor.RawBlockletColumnChunks;
import org.apache.carbondata.core.util.BitSetGroup;
-public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilterExecutor {
+public class AndFilterExecutorImpl implements FilterExecutor, ImplicitColumnFilterExecutor {
- private FilterExecuter leftExecuter;
- private FilterExecuter rightExecuter;
+ private FilterExecutor leftExecutor;
+ private FilterExecutor rightExecutor;
- public AndFilterExecuterImpl(FilterExecuter leftExecuter, FilterExecuter rightExecuter) {
- this.leftExecuter = leftExecuter;
- this.rightExecuter = rightExecuter;
+ public AndFilterExecutorImpl(FilterExecutor leftExecutor, FilterExecutor rightExecuter) {
+ this.leftExecutor = leftExecutor;
+ this.rightExecutor = rightExecuter;
}
@Override
public BitSetGroup applyFilter(RawBlockletColumnChunks rawBlockletColumnChunks,
boolean useBitsetPipeLine) throws FilterUnsupportedException, IOException {
- BitSetGroup leftFilters = leftExecuter.applyFilter(rawBlockletColumnChunks, useBitsetPipeLine);
+ BitSetGroup leftFilters = leftExecutor.applyFilter(rawBlockletColumnChunks, useBitsetPipeLine);
if (leftFilters.isEmpty()) {
return leftFilters;
}
- BitSetGroup rightFilter = rightExecuter.applyFilter(rawBlockletColumnChunks, useBitsetPipeLine);
+ BitSetGroup rightFilter = rightExecutor.applyFilter(rawBlockletColumnChunks, useBitsetPipeLine);
if (rightFilter.isEmpty()) {
return rightFilter;
}
@@ -54,11 +54,11 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
@Override
public BitSet prunePages(RawBlockletColumnChunks rawBlockletColumnChunks)
throws FilterUnsupportedException, IOException {
- BitSet leftFilters = leftExecuter.prunePages(rawBlockletColumnChunks);
+ BitSet leftFilters = leftExecutor.prunePages(rawBlockletColumnChunks);
if (leftFilters.isEmpty()) {
return leftFilters;
}
- BitSet rightFilter = rightExecuter.prunePages(rawBlockletColumnChunks);
+ BitSet rightFilter = rightExecutor.prunePages(rawBlockletColumnChunks);
if (rightFilter.isEmpty()) {
return rightFilter;
}
@@ -69,18 +69,18 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
@Override
public boolean applyFilter(RowIntf value, int dimOrdinalMax)
throws FilterUnsupportedException, IOException {
- return leftExecuter.applyFilter(value, dimOrdinalMax) &&
- rightExecuter.applyFilter(value, dimOrdinalMax);
+ return leftExecutor.applyFilter(value, dimOrdinalMax) &&
+ rightExecutor.applyFilter(value, dimOrdinalMax);
}
@Override
public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
boolean[] isMinMaxSet) {
- BitSet leftFilters = leftExecuter.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
+ BitSet leftFilters = leftExecutor.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
if (leftFilters.isEmpty()) {
return leftFilters;
}
- BitSet rightFilter = rightExecuter.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
+ BitSet rightFilter = rightExecutor.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
if (rightFilter.isEmpty()) {
return rightFilter;
}
@@ -90,30 +90,30 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
@Override
public void readColumnChunks(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException {
- leftExecuter.readColumnChunks(rawBlockletColumnChunks);
- rightExecuter.readColumnChunks(rawBlockletColumnChunks);
+ leftExecutor.readColumnChunks(rawBlockletColumnChunks);
+ rightExecutor.readColumnChunks(rawBlockletColumnChunks);
}
@Override
public BitSet isFilterValuesPresentInBlockOrBlocklet(byte[][] maxValue, byte[][] minValue,
String uniqueBlockPath, boolean[] isMinMaxSet) {
BitSet leftFilters = null;
- if (leftExecuter instanceof ImplicitColumnFilterExecutor) {
- leftFilters = ((ImplicitColumnFilterExecutor) leftExecuter)
+ if (leftExecutor instanceof ImplicitColumnFilterExecutor) {
+ leftFilters = ((ImplicitColumnFilterExecutor) leftExecutor)
.isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue, uniqueBlockPath, isMinMaxSet);
} else {
- leftFilters = leftExecuter
+ leftFilters = leftExecutor
.isScanRequired(maxValue, minValue, isMinMaxSet);
}
if (leftFilters.isEmpty()) {
return leftFilters;
}
BitSet rightFilter = null;
- if (rightExecuter instanceof ImplicitColumnFilterExecutor) {
- rightFilter = ((ImplicitColumnFilterExecutor) rightExecuter)
+ if (rightExecutor instanceof ImplicitColumnFilterExecutor) {
+ rightFilter = ((ImplicitColumnFilterExecutor) rightExecutor)
.isFilterValuesPresentInBlockOrBlocklet(maxValue, minValue, uniqueBlockPath, isMinMaxSet);
} else {
- rightFilter = rightExecuter.isScanRequired(maxValue, minValue, isMinMaxSet);
+ rightFilter = rightExecutor.isScanRequired(maxValue, minValue, isMinMaxSet);
}
if (rightFilter.isEmpty()) {
return rightFilter;
@@ -127,11 +127,11 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
boolean[] isMinMaxSet) {
Boolean leftRes;
BitSet tempFilter;
- if (leftExecuter instanceof ImplicitColumnFilterExecutor) {
- leftRes = ((ImplicitColumnFilterExecutor) leftExecuter)
+ if (leftExecutor instanceof ImplicitColumnFilterExecutor) {
+ leftRes = ((ImplicitColumnFilterExecutor) leftExecutor)
.isFilterValuesPresentInAbstractIndex(maxValue, minValue, isMinMaxSet);
} else {
- tempFilter = leftExecuter
+ tempFilter = leftExecutor
.isScanRequired(maxValue, minValue, isMinMaxSet);
leftRes = !tempFilter.isEmpty();
}
@@ -140,11 +140,11 @@ public class AndFilterExecuterImpl implements FilterExecuter, ImplicitColumnFilt
}
Boolean rightRes = null;
- if (rightExecuter instanceof ImplicitColumnFilterExecutor) {
- rightRes = ((ImplicitColumnFilterExecutor) rightExecuter)
+ if (rightExecutor instanceof ImplicitColumnFilterExecutor) {
+ rightRes = ((ImplicitColumnFilterExecutor) rightExecutor)
.isFilterValuesPresentInAbstractIndex(maxValue, minValue, isMinMaxSet);
} else {
- tempFilter = rightExecuter
+ tempFilter = rightExecutor
.isScanRequired(maxValue, minValue, isMinMaxSet);
rightRes = !tempFilter.isEmpty();
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/BitSetUpdaterFactory.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/BitSetUpdaterFactory.java
index 375ba61..0dcc945 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/BitSetUpdaterFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/BitSetUpdaterFactory.java
@@ -19,7 +19,7 @@ package org.apache.carbondata.core.scan.filter.executer;
import java.util.BitSet;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
/**
* Class for updating the bitset
@@ -30,15 +30,15 @@ public final class BitSetUpdaterFactory {
public static final BitSetUpdaterFactory INSTANCE = new BitSetUpdaterFactory();
- public FilterBitSetUpdater getBitSetUpdater(FilterExecuterType filterExecuterType) {
- switch (filterExecuterType) {
+ public FilterBitSetUpdater getBitSetUpdater(FilterExecutorType filterExecutorType) {
+ switch (filterExecutorType) {
case INCLUDE:
return new IncludeFilterBitSetUpdater();
case EXCLUDE:
return new ExcludeFilterBitSetUpdater();
default:
throw new UnsupportedOperationException(
- "Invalid filter executor type:" + filterExecuterType);
+ "Invalid filter executor type:" + filterExecutorType);
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/DimColumnExecuterFilterInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/DimColumnExecutorFilterInfo.java
similarity index 96%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/DimColumnExecuterFilterInfo.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/DimColumnExecutorFilterInfo.java
index 6291de2..2d3cc0f 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/DimColumnExecuterFilterInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/DimColumnExecutorFilterInfo.java
@@ -17,7 +17,7 @@
package org.apache.carbondata.core.scan.filter.executer;
-public class DimColumnExecuterFilterInfo {
+public class DimColumnExecutorFilterInfo {
byte[][] filterKeys;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecutorImpl.java
similarity index 93%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecutorImpl.java
index baa89de..845245c 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeFilterExecutorImpl.java
@@ -28,7 +28,7 @@ import org.apache.carbondata.core.datastore.page.ColumnPage;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.scan.filter.FilterExecutorUtil;
import org.apache.carbondata.core.scan.filter.FilterUtil;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.intf.RowIntf;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
@@ -40,12 +40,12 @@ import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.core.util.comparator.Comparator;
import org.apache.carbondata.core.util.comparator.SerializableComparator;
-public class ExcludeFilterExecuterImpl implements FilterExecuter {
+public class ExcludeFilterExecutorImpl implements FilterExecutor {
private DimColumnResolvedFilterInfo dimColEvaluatorInfo;
- private DimColumnExecuterFilterInfo dimColumnExecuterInfo;
+ private DimColumnExecutorFilterInfo dimColumnExecuterInfo;
private MeasureColumnResolvedFilterInfo msrColumnEvaluatorInfo;
- private MeasureColumnExecuterFilterInfo msrColumnExecutorInfo;
+ private MeasureColumnExecutorFilterInfo msrColumnExecutorInfo;
protected SegmentProperties segmentProperties;
private boolean isDimensionPresentInCurrentBlock = false;
private boolean isMeasurePresentInCurrentBlock = false;
@@ -59,22 +59,22 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
private FilterBitSetUpdater filterBitSetUpdater;
- public ExcludeFilterExecuterImpl(byte[][] filterValues, boolean isNaturalSorted) {
+ public ExcludeFilterExecutorImpl(byte[][] filterValues, boolean isNaturalSorted) {
this.filterValues = filterValues;
this.isNaturalSorted = isNaturalSorted;
this.filterBitSetUpdater =
- BitSetUpdaterFactory.INSTANCE.getBitSetUpdater(FilterExecuterType.EXCLUDE);
+ BitSetUpdaterFactory.INSTANCE.getBitSetUpdater(FilterExecutorType.EXCLUDE);
}
- public ExcludeFilterExecuterImpl(DimColumnResolvedFilterInfo dimColEvaluatorInfo,
+ public ExcludeFilterExecutorImpl(DimColumnResolvedFilterInfo dimColEvaluatorInfo,
MeasureColumnResolvedFilterInfo msrColumnEvaluatorInfo, SegmentProperties segmentProperties,
boolean isMeasure) {
this.filterBitSetUpdater =
- BitSetUpdaterFactory.INSTANCE.getBitSetUpdater(FilterExecuterType.EXCLUDE);
+ BitSetUpdaterFactory.INSTANCE.getBitSetUpdater(FilterExecutorType.EXCLUDE);
this.segmentProperties = segmentProperties;
if (!isMeasure) {
this.dimColEvaluatorInfo = dimColEvaluatorInfo;
- dimColumnExecuterInfo = new DimColumnExecuterFilterInfo();
+ dimColumnExecuterInfo = new DimColumnExecutorFilterInfo();
FilterUtil.prepareKeysFromSurrogates(dimColEvaluatorInfo.getFilterValues(), segmentProperties,
dimColEvaluatorInfo.getDimension(), dimColumnExecuterInfo, null, null);
@@ -84,7 +84,7 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
.getDimension().isSortColumn();
} else {
this.msrColumnEvaluatorInfo = msrColumnEvaluatorInfo;
- msrColumnExecutorInfo = new MeasureColumnExecuterFilterInfo();
+ msrColumnExecutorInfo = new MeasureColumnExecutorFilterInfo();
FilterUtil
.prepareKeysFromSurrogates(msrColumnEvaluatorInfo.getFilterValues(), segmentProperties,
null, null, msrColumnEvaluatorInfo.getMeasure(), msrColumnExecutorInfo);
@@ -190,12 +190,12 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
return true;
}
- private BitSet getFilteredIndexes(ColumnPage columnPage, int numerOfRows, DataType msrType) {
+ private BitSet getFilteredIndexes(ColumnPage columnPage, int numberOfRows, DataType msrType) {
// Here the algorithm is
// Get the measure values from the chunk. compare sequentially with the
// the filter values. The one that matches sets it Bitset.
- BitSet bitSet = new BitSet(numerOfRows);
- bitSet.flip(0, numerOfRows);
+ BitSet bitSet = new BitSet(numberOfRows);
+ bitSet.flip(0, numberOfRows);
FilterExecutorUtil.executeIncludeExcludeFilterForMeasure(columnPage, bitSet,
msrColumnExecutorInfo, msrColumnEvaluatorInfo, filterBitSetUpdater);
return bitSet;
@@ -230,7 +230,7 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
* @param pageNumber
* @param numberOfRows
* @param msrDataType
- * @return filtred indexes bitset
+ * @return filtered indexes bitset
*/
private BitSet getFilteredIndexesForMsrUsingPrvBitSet(ColumnPage measureColumnPage,
BitSetGroup prvBitSetGroup, int pageNumber, int numberOfRows, DataType msrDataType) {
@@ -280,7 +280,7 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
if (filterValues.length > 0 && CarbonUtil
.usePreviousFilterBitsetGroup(useBitsetPipeLine, prvBitSetGroup, pageNumber,
filterValues.length)) {
- return getFilteredIndexesUisngPrvBitset(dimensionColumnPage, prvBitSetGroup, pageNumber);
+ return getFilteredIndexesUsingPrvBitset(dimensionColumnPage, prvBitSetGroup, pageNumber);
} else {
return getFilteredIndexes(dimensionColumnPage, numberOfRows);
}
@@ -289,9 +289,9 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
private BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
int numberOfRows) {
if (dimensionColumnPage.isExplicitSorted()) {
- return setFilterdIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows);
+ return setFilteredIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows);
}
- return setFilterdIndexToBitSet(dimensionColumnPage, numberOfRows);
+ return setFilteredIndexToBitSet(dimensionColumnPage, numberOfRows);
}
/**
@@ -301,7 +301,7 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
* @param pageNumber
* @return filtered indexes bitset
*/
- private BitSet getFilteredIndexesUisngPrvBitset(DimensionColumnPage dimensionColumnPage,
+ private BitSet getFilteredIndexesUsingPrvBitset(DimensionColumnPage dimensionColumnPage,
BitSetGroup prvBitSetGroup, int pageNumber) {
BitSet prvPageBitSet = prvBitSetGroup.getBitSet(pageNumber);
if (prvPageBitSet == null || prvPageBitSet.isEmpty()) {
@@ -343,7 +343,7 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
return bitSet;
}
- private BitSet setFilterdIndexToBitSetWithColumnIndex(
+ private BitSet setFilteredIndexToBitSetWithColumnIndex(
DimensionColumnPage dimensionColumnPage, int numerOfRows) {
BitSet bitSet = new BitSet(numerOfRows);
bitSet.flip(0, numerOfRows);
@@ -368,7 +368,7 @@ public class ExcludeFilterExecuterImpl implements FilterExecuter {
return bitSet;
}
- private BitSet setFilterdIndexToBitSet(DimensionColumnPage dimensionColumnPage,
+ private BitSet setFilteredIndexToBitSet(DimensionColumnPage dimensionColumnPage,
int numerOfRows) {
BitSet bitSet = new BitSet(numerOfRows);
bitSet.flip(0, numerOfRows);
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java
index 4aa74ef..383deff 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FalseFilterExecutor.java
@@ -23,7 +23,7 @@ import org.apache.carbondata.core.scan.filter.intf.RowIntf;
import org.apache.carbondata.core.scan.processor.RawBlockletColumnChunks;
import org.apache.carbondata.core.util.BitSetGroup;
-public class FalseFilterExecutor implements FilterExecuter {
+public class FalseFilterExecutor implements FilterExecutor {
@Override
public BitSetGroup applyFilter(RawBlockletColumnChunks rawChunks, boolean useBitsetPipeline) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecuter.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecutor.java
similarity index 98%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecuter.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecutor.java
index 6077e84..8db775a 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecuter.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/FilterExecutor.java
@@ -25,7 +25,7 @@ import org.apache.carbondata.core.scan.filter.intf.RowIntf;
import org.apache.carbondata.core.scan.processor.RawBlockletColumnChunks;
import org.apache.carbondata.core.util.BitSetGroup;
-public interface FilterExecuter {
+public interface FilterExecutor {
/**
* API will apply filter based on resolver instance
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java
index ebf88a6..a027d5d 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ImplicitIncludeFilterExecutorImpl.java
@@ -32,7 +32,7 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
* on the implicit column filter values
*/
public class ImplicitIncludeFilterExecutorImpl
- implements FilterExecuter, ImplicitColumnFilterExecutor {
+ implements FilterExecutor, ImplicitColumnFilterExecutor {
private final DimColumnResolvedFilterInfo dimColumnEvaluatorInfo;
@@ -111,7 +111,7 @@ public class ImplicitIncludeFilterExecutorImpl
/**
* For implicit column filtering, complete data need to be selected. As it is a special case
- * no data need to be discarded, implicit filtering is only for slecting block and blocklets
+ * no data need to be discarded, implicit filtering is only for selecting block and blocklets
*
* @param numberOfRows
* @return
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecutorImpl.java
similarity index 93%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecutorImpl.java
index 41a8f85..7ab5716 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeFilterExecutorImpl.java
@@ -30,7 +30,7 @@ import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.scan.filter.FilterExecutorUtil;
import org.apache.carbondata.core.scan.filter.FilterUtil;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.intf.RowIntf;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
@@ -42,12 +42,12 @@ import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.core.util.comparator.Comparator;
import org.apache.carbondata.core.util.comparator.SerializableComparator;
-public class IncludeFilterExecuterImpl implements FilterExecuter {
+public class IncludeFilterExecutorImpl implements FilterExecutor {
protected DimColumnResolvedFilterInfo dimColumnEvaluatorInfo;
- DimColumnExecuterFilterInfo dimColumnExecuterInfo;
+ DimColumnExecutorFilterInfo dimColumnExecutorInfo;
private MeasureColumnResolvedFilterInfo msrColumnEvaluatorInfo;
- private MeasureColumnExecuterFilterInfo msrColumnExecutorInfo;
+ private MeasureColumnExecutorFilterInfo msrColumnExecutorInfo;
protected SegmentProperties segmentProperties;
private boolean isDimensionPresentInCurrentBlock = false;
private boolean isMeasurePresentInCurrentBlock = false;
@@ -61,25 +61,25 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
private FilterBitSetUpdater filterBitSetUpdater;
- public IncludeFilterExecuterImpl(byte[][] filterValues, boolean isNaturalSorted) {
+ public IncludeFilterExecutorImpl(byte[][] filterValues, boolean isNaturalSorted) {
this.filterValues = filterValues;
this.isNaturalSorted = isNaturalSorted;
this.filterBitSetUpdater =
- BitSetUpdaterFactory.INSTANCE.getBitSetUpdater(FilterExecuterType.INCLUDE);
+ BitSetUpdaterFactory.INSTANCE.getBitSetUpdater(FilterExecutorType.INCLUDE);
}
- public IncludeFilterExecuterImpl(DimColumnResolvedFilterInfo dimColumnEvaluatorInfo,
+ public IncludeFilterExecutorImpl(DimColumnResolvedFilterInfo dimColumnEvaluatorInfo,
MeasureColumnResolvedFilterInfo msrColumnEvaluatorInfo, SegmentProperties segmentProperties,
boolean isMeasure) {
this.filterBitSetUpdater =
- BitSetUpdaterFactory.INSTANCE.getBitSetUpdater(FilterExecuterType.INCLUDE);
+ BitSetUpdaterFactory.INSTANCE.getBitSetUpdater(FilterExecutorType.INCLUDE);
this.segmentProperties = segmentProperties;
if (!isMeasure) {
this.dimColumnEvaluatorInfo = dimColumnEvaluatorInfo;
- dimColumnExecuterInfo = new DimColumnExecuterFilterInfo();
+ dimColumnExecutorInfo = new DimColumnExecutorFilterInfo();
FilterUtil
.prepareKeysFromSurrogates(dimColumnEvaluatorInfo.getFilterValues(), segmentProperties,
- dimColumnEvaluatorInfo.getDimension(), dimColumnExecuterInfo, null, null);
+ dimColumnEvaluatorInfo.getDimension(), dimColumnExecutorInfo, null, null);
isDimensionPresentInCurrentBlock = true;
isNaturalSorted =
dimColumnEvaluatorInfo.getDimension().isUseInvertedIndex() && dimColumnEvaluatorInfo
@@ -87,7 +87,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
} else {
this.msrColumnEvaluatorInfo = msrColumnEvaluatorInfo;
- msrColumnExecutorInfo = new MeasureColumnExecuterFilterInfo();
+ msrColumnExecutorInfo = new MeasureColumnExecutorFilterInfo();
comparator =
Comparator.getComparatorByDataTypeForMeasure(
FilterUtil.getMeasureDataType(msrColumnEvaluatorInfo));
@@ -113,7 +113,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
DimensionRawColumnChunk dimensionRawColumnChunk =
rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex];
BitSetGroup bitSetGroup = new BitSetGroup(dimensionRawColumnChunk.getPagesCount());
- filterValues = dimColumnExecuterInfo.getFilterKeys();
+ filterValues = dimColumnExecutorInfo.getFilterKeys();
boolean isDecoded = false;
for (int i = 0; i < dimensionRawColumnChunk.getPagesCount(); i++) {
if (dimensionRawColumnChunk.getMaxValues() != null) {
@@ -122,7 +122,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
if (!isDecoded) {
filterValues = FilterUtil
.getEncodedFilterValues(dimensionRawColumnChunk.getLocalDictionary(),
- dimColumnExecuterInfo.getFilterKeys());
+ dimColumnExecutorInfo.getFilterKeys());
isDecoded = true;
}
BitSet bitSet = getFilteredIndexes(dimensionColumnPage,
@@ -182,12 +182,12 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
&& dimColumnEvaluatorInfo.getDimension().getDataType() != DataTypes.DATE) {
scanRequired = isScanRequired(dimensionRawColumnChunk.getMaxValues()[columnIndex],
dimensionRawColumnChunk.getMinValues()[columnIndex],
- dimColumnExecuterInfo.getFilterKeys(),
+ dimColumnExecutorInfo.getFilterKeys(),
dimColumnEvaluatorInfo.getDimension().getDataType());
} else {
scanRequired = isScanRequired(dimensionRawColumnChunk.getMaxValues()[columnIndex],
dimensionRawColumnChunk.getMinValues()[columnIndex],
- dimColumnExecuterInfo.getFilterKeys(),
+ dimColumnExecutorInfo.getFilterKeys(),
dimensionRawColumnChunk.getMinMaxFlagArray()[columnIndex]);
}
return scanRequired;
@@ -206,7 +206,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
}
DimensionRawColumnChunk dimensionRawColumnChunk =
rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex];
- filterValues = dimColumnExecuterInfo.getFilterKeys();
+ filterValues = dimColumnExecutorInfo.getFilterKeys();
BitSet bitSet = new BitSet(dimensionRawColumnChunk.getPagesCount());
for (int i = 0; i < dimensionRawColumnChunk.getPagesCount(); i++) {
if (dimensionRawColumnChunk.getMaxValues() != null) {
@@ -248,7 +248,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
@Override
public boolean applyFilter(RowIntf value, int dimOrdinalMax) {
if (isDimensionPresentInCurrentBlock) {
- byte[][] filterValues = dimColumnExecuterInfo.getFilterKeys();
+ byte[][] filterValues = dimColumnExecutorInfo.getFilterKeys();
byte[] col = (byte[])value.getVal(dimColumnEvaluatorInfo.getDimension().getOrdinal());
for (int i = 0; i < filterValues.length; i++) {
if (0 == ByteUtil.UnsafeComparer.INSTANCE.compareTo(col, 0, col.length,
@@ -314,7 +314,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
* @param pageNumber
* @param numberOfRows
* @param msrDataType
- * @return filtred indexes bitset
+ * @return filtered indexes bitset
*/
private BitSet getFilteredIndexesForMsrUsingPrvBitSet(ColumnPage measureColumnPage,
BitSetGroup prvBitSetGroup, int pageNumber, int numberOfRows, DataType msrDataType) {
@@ -363,7 +363,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
if (filterValues.length > 0 && CarbonUtil
.usePreviousFilterBitsetGroup(useBitsetPipeLine, prvBitSetGroup, pageNumber,
filterValues.length)) {
- return getFilteredIndexesUisngPrvBitset(dimensionColumnPage, prvBitSetGroup, pageNumber,
+ return getFilteredIndexesUsingPrvBitset(dimensionColumnPage, prvBitSetGroup, pageNumber,
numberOfRows);
} else {
return getFilteredIndexes(dimensionColumnPage, numberOfRows);
@@ -373,9 +373,9 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
private BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
int numberOfRows) {
if (dimensionColumnPage.isExplicitSorted()) {
- return setFilterdIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows);
+ return setFilteredIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows);
}
- return setFilterdIndexToBitSet(dimensionColumnPage, numberOfRows);
+ return setFilteredIndexToBitSet(dimensionColumnPage, numberOfRows);
}
/**
@@ -387,7 +387,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
* @param numberOfRows
* @return filtered bitset
*/
- private BitSet getFilteredIndexesUisngPrvBitset(DimensionColumnPage dimensionColumnPage,
+ private BitSet getFilteredIndexesUsingPrvBitset(DimensionColumnPage dimensionColumnPage,
BitSetGroup prvBitSetGroup, int pageNumber, int numberOfRows) {
BitSet prvPageBitSet = prvBitSetGroup.getBitSet(pageNumber);
if (prvPageBitSet == null || prvPageBitSet.isEmpty()) {
@@ -420,7 +420,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
return bitSet;
}
- private BitSet setFilterdIndexToBitSetWithColumnIndex(
+ private BitSet setFilteredIndexToBitSetWithColumnIndex(
DimensionColumnPage dimensionColumnPage, int numerOfRows) {
BitSet bitSet = new BitSet(numerOfRows);
if (filterValues.length == 0) {
@@ -444,9 +444,9 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
return bitSet;
}
- private BitSet setFilterdIndexToBitSet(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSet(DimensionColumnPage dimensionColumnPage,
+ int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
if (filterValues.length == 0) {
return bitSet;
}
@@ -455,11 +455,11 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
if (isNaturalSorted && dimensionColumnPage.isExplicitSorted()) {
int startIndex = 0;
for (int i = 0; i < filterValues.length; i++) {
- if (startIndex >= numerOfRows) {
+ if (startIndex >= numberOfRows) {
break;
}
int[] rangeIndex = CarbonUtil
- .getRangeIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getRangeIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[i]);
for (int j = rangeIndex[0]; j <= rangeIndex[1]; j++) {
bitSet.set(j);
@@ -470,7 +470,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
}
} else {
if (filterValues.length > 1) {
- for (int i = 0; i < numerOfRows; i++) {
+ for (int i = 0; i < numberOfRows; i++) {
int index = CarbonUtil.binarySearch(filterValues, 0, filterValues.length - 1,
dimensionColumnPage, i);
if (index >= 0) {
@@ -478,7 +478,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
}
}
} else {
- for (int j = 0; j < numerOfRows; j++) {
+ for (int j = 0; j < numberOfRows; j++) {
if (dimensionColumnPage.compareTo(j, filterValues[0]) == 0) {
bitSet.set(j);
}
@@ -496,7 +496,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
boolean isScanRequired = false;
if (isDimensionPresentInCurrentBlock) {
- filterValues = dimColumnExecuterInfo.getFilterKeys();
+ filterValues = dimColumnExecutorInfo.getFilterKeys();
chunkIndex = dimColumnEvaluatorInfo.getColumnIndexInMinMaxByteArray();
// for no dictionary measure column comparison can be done
// on the original data as like measure column
@@ -533,7 +533,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
boolean isScanRequired = false;
for (int k = 0; k < filterValues.length; k++) {
// filter value should be in range of max and min value i.e
- // max>filtervalue>min
+ // max>filterValue>min
// so filter-max should be negative
int maxCompare =
ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blkMaxVal);
@@ -562,7 +562,7 @@ public class IncludeFilterExecuterImpl implements FilterExecuter {
return true;
}
// filter value should be in range of max and min value i.e
- // max>filtervalue>min
+ // max>filterValue>min
// so filter-max should be negative
Object data =
DataTypeUtil.getDataBasedOnDataTypeForNoDictionaryColumn(filterValues[k], dataType);
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/MeasureColumnExecuterFilterInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/MeasureColumnExecutorFilterInfo.java
similarity index 97%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/MeasureColumnExecuterFilterInfo.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/MeasureColumnExecutorFilterInfo.java
index 1a325ea..61b9836 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/MeasureColumnExecuterFilterInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/MeasureColumnExecutorFilterInfo.java
@@ -35,10 +35,10 @@ import it.unimi.dsi.fastutil.shorts.ShortOpenHashSet;
* Below class will be used to keep all the filter values based on data type
* for measure column.
* In this class there are multiple type of set is used to avoid conversion of
- * primitive type to primitive object to avoid gc which cause perofrmace degrade when
+ * primitive type to primitive object to avoid gc which causes performance degradation when
* number of records are high
*/
-public class MeasureColumnExecuterFilterInfo {
+public class MeasureColumnExecutorFilterInfo {
Object[] filterKeys;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecutorImpl.java
similarity index 70%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecutorImpl.java
index 8d3fd8f..01e3fdd 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/OrFilterExecutorImpl.java
@@ -25,21 +25,21 @@ import org.apache.carbondata.core.scan.filter.intf.RowIntf;
import org.apache.carbondata.core.scan.processor.RawBlockletColumnChunks;
import org.apache.carbondata.core.util.BitSetGroup;
-public class OrFilterExecuterImpl implements FilterExecuter {
+public class OrFilterExecutorImpl implements FilterExecutor {
- private FilterExecuter leftExecuter;
- private FilterExecuter rightExecuter;
+ private FilterExecutor leftExecutor;
+ private FilterExecutor rightExecutor;
- public OrFilterExecuterImpl(FilterExecuter leftExecuter, FilterExecuter rightExecuter) {
- this.leftExecuter = leftExecuter;
- this.rightExecuter = rightExecuter;
+ public OrFilterExecutorImpl(FilterExecutor leftExecutor, FilterExecutor rightExecutor) {
+ this.leftExecutor = leftExecutor;
+ this.rightExecutor = rightExecutor;
}
@Override
public BitSetGroup applyFilter(RawBlockletColumnChunks rawBlockletColumnChunks,
boolean useBitsetPipeLine) throws FilterUnsupportedException, IOException {
- BitSetGroup leftFilters = leftExecuter.applyFilter(rawBlockletColumnChunks, false);
- BitSetGroup rightFilters = rightExecuter.applyFilter(rawBlockletColumnChunks, false);
+ BitSetGroup leftFilters = leftExecutor.applyFilter(rawBlockletColumnChunks, false);
+ BitSetGroup rightFilters = rightExecutor.applyFilter(rawBlockletColumnChunks, false);
leftFilters.or(rightFilters);
rawBlockletColumnChunks.setBitSetGroup(leftFilters);
return leftFilters;
@@ -48,8 +48,8 @@ public class OrFilterExecuterImpl implements FilterExecuter {
@Override
public BitSet prunePages(RawBlockletColumnChunks rawBlockletColumnChunks)
throws FilterUnsupportedException, IOException {
- BitSet leftFilters = leftExecuter.prunePages(rawBlockletColumnChunks);
- BitSet rightFilters = rightExecuter.prunePages(rawBlockletColumnChunks);
+ BitSet leftFilters = leftExecutor.prunePages(rawBlockletColumnChunks);
+ BitSet rightFilters = rightExecutor.prunePages(rawBlockletColumnChunks);
leftFilters.or(rightFilters);
return leftFilters;
}
@@ -57,22 +57,22 @@ public class OrFilterExecuterImpl implements FilterExecuter {
@Override
public boolean applyFilter(RowIntf value, int dimOrdinalMax)
throws FilterUnsupportedException, IOException {
- return leftExecuter.applyFilter(value, dimOrdinalMax) ||
- rightExecuter.applyFilter(value, dimOrdinalMax);
+ return leftExecutor.applyFilter(value, dimOrdinalMax) ||
+ rightExecutor.applyFilter(value, dimOrdinalMax);
}
@Override
public BitSet isScanRequired(byte[][] blockMaxValue, byte[][] blockMinValue,
boolean[] isMinMaxSet) {
- BitSet leftFilters = leftExecuter.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
- BitSet rightFilters = rightExecuter.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
+ BitSet leftFilters = leftExecutor.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
+ BitSet rightFilters = rightExecutor.isScanRequired(blockMaxValue, blockMinValue, isMinMaxSet);
leftFilters.or(rightFilters);
return leftFilters;
}
@Override
public void readColumnChunks(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException {
- leftExecuter.readColumnChunks(rawBlockletColumnChunks);
- rightExecuter.readColumnChunks(rawBlockletColumnChunks);
+ leftExecutor.readColumnChunks(rawBlockletColumnChunks);
+ rightExecutor.readColumnChunks(rawBlockletColumnChunks);
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecutorImpl.java
similarity index 93%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecutorImpl.java
index 664e046..e4daee3 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecutorImpl.java
@@ -44,7 +44,7 @@ import org.apache.carbondata.core.util.CarbonUtil;
* for this Range. Also search the data block and set the required bitsets which falls within
* the Range of the RANGE Expression.
*/
-public class RangeValueFilterExecuterImpl implements FilterExecuter {
+public class RangeValueFilterExecutorImpl implements FilterExecutor {
private DimColumnResolvedFilterInfo dimColEvaluatorInfo;
private Expression exp;
@@ -66,7 +66,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
private boolean isRangeFullyCoverBlock;
private boolean isNaturalSorted;
- public RangeValueFilterExecuterImpl(DimColumnResolvedFilterInfo dimColEvaluatorInfo,
+ public RangeValueFilterExecutorImpl(DimColumnResolvedFilterInfo dimColEvaluatorInfo,
Expression exp, byte[][] filterRangeValues, SegmentProperties segmentProperties) {
this.dimColEvaluatorInfo = dimColEvaluatorInfo;
@@ -272,8 +272,8 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
endBlockMaxisDefaultEnd = false;
/*
- For Undertsanding the below logic kept the value evaluation code intact.
- int filterMinlessThanBlockMin =
+ For understanding the below logic kept the value evaluation code intact.
+ int filterMinLessThanBlockMin =
ByteUtil.UnsafeComparer.INSTANCE.compareTo(blockMinValue, filterValues[0]);
int filterMaxLessThanBlockMin =
ByteUtil.UnsafeComparer.INSTANCE.compareTo(blockMinValue, filterValues[1]);
@@ -392,7 +392,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
DimensionRawColumnChunk rawColumnChunk =
blockChunkHolder.getDimensionRawColumnChunks()[chunkIndex];
BitSetGroup bitSetGroup = new BitSetGroup(rawColumnChunk.getPagesCount());
- FilterExecuter filterExecuter = null;
+ FilterExecutor filterExecutor = null;
boolean isExclude = false;
for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
if (rawColumnChunk.getMaxValues() != null) {
@@ -407,20 +407,20 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
BitSet bitSet;
DimensionColumnPage dimensionColumnPage = rawColumnChunk.decodeColumnPage(i);
if (null != rawColumnChunk.getLocalDictionary()) {
- if (null == filterExecuter) {
- filterExecuter = FilterUtil
+ if (null == filterExecutor) {
+ filterExecutor = FilterUtil
.getFilterExecutorForRangeFilters(rawColumnChunk, exp, isNaturalSorted);
- if (filterExecuter instanceof ExcludeFilterExecuterImpl) {
+ if (filterExecutor instanceof ExcludeFilterExecutorImpl) {
isExclude = true;
}
}
if (!isExclude) {
- bitSet = ((IncludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((IncludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
blockChunkHolder.getBitSetGroup(), i);
} else {
- bitSet = ((ExcludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((ExcludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
blockChunkHolder.getBitSetGroup(), i);
@@ -442,26 +442,26 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
}
private BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
+ int numberOfRows) {
if (dimensionColumnPage.isExplicitSorted()) {
- return setFilterdIndexToBitSetWithColumnIndex(dimensionColumnPage, numerOfRows);
+ return setFilteredIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows);
}
- return setFilterdIndexToBitSet(dimensionColumnPage, numerOfRows);
+ return setFilteredIndexToBitSet(dimensionColumnPage, numberOfRows);
}
/**
* Method will scan the block and finds the range start index from which all members
* will be considered for applying range filters. this method will be called if the
* column is not supported by default so column index mapping will be present for
- * accesing the members from the block.
+ * accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSetWithColumnIndex(
- DimensionColumnPage dimensionColumnPage, int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSetWithColumnIndex(
+ DimensionColumnPage dimensionColumnPage, int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
int start = 0;
int startIndex = 0;
int startMin = 0;
@@ -473,17 +473,17 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
// Get the Min Value
if (!startBlockMinIsDefaultStart) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[0], greaterThanExp);
if (greaterThanExp && start >= 0) {
start = CarbonUtil
.nextGreaterValueToTarget(start, dimensionColumnPage, filterValues[0],
- numerOfRows);
+ numberOfRows);
}
if (start < 0) {
start = -(start + 1);
- if (start == numerOfRows) {
+ if (start == numberOfRows) {
start = start - 1;
}
// Method will compare the tentative index value after binary search, this tentative
@@ -503,7 +503,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
// Get the Max value
if (!endBlockMaxisDefaultEnd) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[1], lessThanEqualExp);
if (lessThanExp && start >= 0) {
@@ -513,7 +513,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
if (start < 0) {
start = -(start + 1);
- if (start == numerOfRows) {
+ if (start == numberOfRows) {
start = start - 1;
}
// In case the start is less than 0, then positive value of start is pointing to the next
@@ -526,7 +526,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
}
endMax = start;
} else {
- endMax = numerOfRows - 1;
+ endMax = numberOfRows - 1;
}
for (int j = startMin; j <= endMax; j++) {
@@ -535,7 +535,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
// Binary Search cannot be done on '@NU#LL$!", so need to check and compare for null on
// matching row.
- if (dimensionColumnPage.isNoDicitionaryColumn() && !dimensionColumnPage.isAdaptiveEncoded()) {
+ if (dimensionColumnPage.isNoDictionaryColumn() && !dimensionColumnPage.isAdaptiveEncoded()) {
updateForNoDictionaryColumn(startMin, endMax, dimensionColumnPage, bitSet);
}
return bitSet;
@@ -555,15 +555,15 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
* Method will scan the block and finds the range start index from which all
* members will be considered for applying range filters. this method will
* be called if the column is sorted default so column index
- * mapping will be present for accesaing the members from the block.
+ * mapping will be present for accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSet(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSet(DimensionColumnPage dimensionColumnPage,
+ int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
// if (dimensionColumnPage instanceof FixedLengthDimensionColumnPage) {
byte[][] filterValues = this.filterRangesValues;
if (dimensionColumnPage.isExplicitSorted()) {
@@ -576,18 +576,18 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
if (!startBlockMinIsDefaultStart) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[0], greaterThanExp);
if (greaterThanExp && start >= 0) {
start = CarbonUtil
.nextGreaterValueToTarget(start, dimensionColumnPage, filterValues[0],
- numerOfRows);
+ numberOfRows);
}
if (start < 0) {
start = -(start + 1);
- if (start == numerOfRows) {
+ if (start == numberOfRows) {
start = start - 1;
}
// Method will compare the tentative index value after binary search, this tentative
@@ -605,7 +605,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
if (!endBlockMaxisDefaultEnd) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[1], lessThanEqualExp);
if (lessThanExp && start >= 0) {
@@ -615,7 +615,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
if (start < 0) {
start = -(start + 1);
- if (start == numerOfRows) {
+ if (start == numberOfRows) {
start = start - 1;
}
// In case the start is less than 0, then positive value of start is pointing to the next
@@ -627,7 +627,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
}
endMax = start;
} else {
- endMax = numerOfRows - 1;
+ endMax = numberOfRows - 1;
}
for (int j = startMin; j <= endMax; j++) {
@@ -636,7 +636,7 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
// Binary Search cannot be done on '@NU#LL$!", so need to check and compare for null on
// matching row.
- if (dimensionColumnPage.isNoDicitionaryColumn()) {
+ if (dimensionColumnPage.isNoDictionaryColumn()) {
updateForNoDictionaryColumn(startMin, endMax, dimensionColumnPage, bitSet);
}
} else {
@@ -653,10 +653,10 @@ public class RangeValueFilterExecuterImpl implements FilterExecuter {
// evaluate result for lower range value first and then perform and operation in the
// upper range value in order to compute the final result
bitSet = evaluateGreaterThanFilterForUnsortedColumn(dimensionColumnPage, filterValues[0],
- numerOfRows);
+ numberOfRows);
BitSet upperRangeBitSet =
evaluateLessThanFilterForUnsortedColumn(dimensionColumnPage, filterValues[1],
- numerOfRows);
+ numberOfRows);
bitSet.and(upperRangeBitSet);
FilterUtil.removeNullValues(dimensionColumnPage, bitSet, defaultValue);
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
index 452e07f..fd685f8 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
@@ -40,7 +40,7 @@ import org.apache.carbondata.core.util.comparator.SerializableComparator;
/**
* Abstract class for restructure
*/
-public abstract class RestructureEvaluatorImpl implements FilterExecuter {
+public abstract class RestructureEvaluatorImpl implements FilterExecutor {
/**
* This method will check whether a default value for the non-existing column is present
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecutorImpl.java
similarity index 97%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecutorImpl.java
index d0429e2..8a8a841 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecutorImpl.java
@@ -58,10 +58,10 @@ import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.log4j.Logger;
-public class RowLevelFilterExecuterImpl implements FilterExecuter {
+public class RowLevelFilterExecutorImpl implements FilterExecutor {
private static final Logger LOGGER =
- LogServiceFactory.getLogService(RowLevelFilterExecuterImpl.class.getName());
+ LogServiceFactory.getLogService(RowLevelFilterExecutorImpl.class.getName());
List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList;
List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList;
protected Expression exp;
@@ -100,7 +100,7 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
*/
boolean isNaturalSorted;
- public RowLevelFilterExecuterImpl(List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
+ public RowLevelFilterExecutorImpl(List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
AbsoluteTableIdentifier tableIdentifier, SegmentProperties segmentProperties,
Map<Integer, GenericQueryType> complexDimensionInfoMap) {
@@ -226,7 +226,7 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
BitSet set = new BitSet(numberOfRows[i]);
RowIntf row = new RowImpl();
BitSet prvBitset = null;
- // if bitset pipe line is enabled then use rowid from previous bitset
+ // if bitset pipe line is enabled then use row id from previous bitset
// otherwise use older flow
if (!useBitsetPipeLine ||
null == rawBlockletColumnChunks.getBitSetGroup() ||
@@ -234,9 +234,9 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
rawBlockletColumnChunks.getBitSetGroup().getBitSet(i).isEmpty()) {
for (int index = 0; index < numberOfRows[i]; index++) {
createRow(rawBlockletColumnChunks, row, i, index);
- Boolean rslt = false;
+ Boolean result = false;
try {
- rslt = exp.evaluate(row).getBoolean();
+ result = exp.evaluate(row).getBoolean();
}
// Any invalid member while evaluation shall be ignored, system will log the
// error only once since all rows the evaluation happens so inorder to avoid
@@ -244,7 +244,7 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
catch (FilterIllegalMemberException e) {
FilterUtil.logError(e, false);
}
- if (null != rslt && rslt) {
+ if (null != result && result) {
set.set(index);
}
}
@@ -253,13 +253,13 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
for (int index = prvBitset.nextSetBit(0);
index >= 0; index = prvBitset.nextSetBit(index + 1)) {
createRow(rawBlockletColumnChunks, row, i, index);
- Boolean rslt = false;
+ Boolean result = false;
try {
- rslt = exp.evaluate(row).getBoolean();
+ result = exp.evaluate(row).getBoolean();
} catch (FilterIllegalMemberException e) {
FilterUtil.logError(e, false);
}
- if (null != rslt && rslt) {
+ if (null != result && result) {
set.set(index);
}
}
@@ -310,7 +310,7 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
continue;
}
if (!dimColumnEvaluatorInfo.getDimension().getDataType().isComplexType()) {
- if (!dimColumnEvaluatorInfo.isDimensionExistsInCurrentSilce()) {
+ if (!dimColumnEvaluatorInfo.isDimensionExistsInCurrentSlice()) {
record[index] = dimColumnEvaluatorInfo.getDimension().getDefaultValue();
}
byte[] memberBytes = (byte[]) value.getVal(index);
@@ -371,7 +371,7 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
continue;
}
if (!dimColumnEvaluatorInfo.getDimension().getDataType().isComplexType()) {
- if (!dimColumnEvaluatorInfo.isDimensionExistsInCurrentSilce()) {
+ if (!dimColumnEvaluatorInfo.isDimensionExistsInCurrentSlice()) {
record[dimColumnEvaluatorInfo.getRowIndex()] =
dimColumnEvaluatorInfo.getDimension().getDefaultValue();
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGreaterThanEqualFilterExecutorImpl.java
similarity index 92%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGreaterThanEqualFilterExecutorImpl.java
index 7d245d9..aebcd25 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGreaterThanEqualFilterExecutorImpl.java
@@ -46,7 +46,7 @@ import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.core.util.comparator.Comparator;
import org.apache.carbondata.core.util.comparator.SerializableComparator;
-public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilterExecuterImpl {
+public class RowLevelRangeGreaterThanEqualFilterExecutorImpl extends RowLevelFilterExecutorImpl {
private byte[][] filterRangeValues;
private Object[] msrFilterRangeValues;
@@ -56,7 +56,7 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
*/
private boolean isDefaultValuePresentInFilter;
- RowLevelRangeGrtrThanEquaToFilterExecuterImpl(
+ RowLevelRangeGreaterThanEqualFilterExecutorImpl(
List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
AbsoluteTableIdentifier tableIdentifier, byte[][] filterRangeValues,
@@ -158,7 +158,7 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
boolean isScanRequired = false;
for (int k = 0; k < filterValues.length; k++) {
// filter value should be in range of max and min value i.e
- // max>filtervalue>min
+ // max>filterValue>min
// so filter-max should be negative
int maxCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMaxValue);
// if any filter value is in range than this block needs to be
@@ -181,7 +181,7 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
return true;
}
// filter value should be in range of max and min value i.e
- // max>filtervalue>min
+ // max>filterValue>min
// so filter-max should be negative
Object data =
DataTypeUtil.getDataBasedOnDataTypeForNoDictionaryColumn(filterValues[k], dataType);
@@ -234,7 +234,7 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
DimensionRawColumnChunk rawColumnChunk =
rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex];
BitSetGroup bitSetGroup = new BitSetGroup(rawColumnChunk.getPagesCount());
- FilterExecuter filterExecuter = null;
+ FilterExecutor filterExecutor = null;
boolean isExclude = false;
for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
if (rawColumnChunk.getMaxValues() != null) {
@@ -249,20 +249,20 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
DimensionColumnPage dimensionColumnPage = rawColumnChunk.decodeColumnPage(i);
BitSet bitSet = null;
if (null != rawColumnChunk.getLocalDictionary()) {
- if (null == filterExecuter) {
- filterExecuter = FilterUtil
+ if (null == filterExecutor) {
+ filterExecutor = FilterUtil
.getFilterExecutorForRangeFilters(rawColumnChunk, exp, isNaturalSorted);
- if (filterExecuter instanceof ExcludeFilterExecuterImpl) {
+ if (filterExecutor instanceof ExcludeFilterExecutorImpl) {
isExclude = true;
}
}
if (!isExclude) {
- bitSet = ((IncludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((IncludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
rawBlockletColumnChunks.getBitSetGroup(), i);
} else {
- bitSet = ((ExcludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((ExcludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
rawBlockletColumnChunks.getBitSetGroup(), i);
@@ -416,8 +416,8 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
}
private BitSet getFilteredIndexesForMeasures(ColumnPage columnPage,
- int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
Object[] filterValues = this.msrFilterRangeValues;
DataType msrType = msrColEvalutorInfoList.get(0).getType();
SerializableComparator comparator = Comparator.getComparatorByDataTypeForMeasure(msrType);
@@ -429,7 +429,7 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
}
continue;
}
- for (int startIndex = 0; startIndex < numerOfRows; startIndex++) {
+ for (int startIndex = 0; startIndex < numberOfRows; startIndex++) {
if (!nullBitSet.get(startIndex)) {
Object msrValue = DataTypeUtil
.getMeasureObjectBasedOnDataType(columnPage, startIndex,
@@ -446,12 +446,12 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
}
private BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
+ int numberOfRows) {
BitSet bitSet = null;
if (dimensionColumnPage.isExplicitSorted()) {
- bitSet = setFilterdIndexToBitSetWithColumnIndex(dimensionColumnPage, numerOfRows);
+ bitSet = setFilteredIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows);
} else {
- bitSet = setFilterdIndexToBitSet(dimensionColumnPage, numerOfRows);
+ bitSet = setFilteredIndexToBitSet(dimensionColumnPage, numberOfRows);
}
byte[] defaultValue = null;
if (dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.STRING) {
@@ -462,7 +462,7 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
} else if (!dimensionColumnPage.isAdaptiveEncoded()) {
defaultValue = CarbonCommonConstants.EMPTY_BYTE_ARRAY;
}
- if (dimensionColumnPage.isNoDicitionaryColumn() ||
+ if (dimensionColumnPage.isNoDictionaryColumn() ||
dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.DATE) {
FilterUtil.removeNullValues(dimensionColumnPage, bitSet, defaultValue);
}
@@ -473,26 +473,26 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
* Method will scan the block and finds the range start index from which all members
* will be considered for applying range filters. this method will be called if the
* column is not supported by default so column index mapping will be present for
- * accesing the members from the block.
+ * accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSetWithColumnIndex(
- DimensionColumnPage dimensionColumnPage, int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSetWithColumnIndex(
+ DimensionColumnPage dimensionColumnPage, int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
int start = 0;
int last = 0;
int startIndex = 0;
byte[][] filterValues = this.filterRangeValues;
for (int i = 0; i < filterValues.length; i++) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[i], false);
if (start < 0) {
start = -(start + 1);
- if (start == numerOfRows) {
+ if (start == numberOfRows) {
start = start - 1;
}
// Method will compare the tentative index value after binary search, this tentative
@@ -505,12 +505,12 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
}
}
last = start;
- for (int j = start; j < numerOfRows; j++) {
+ for (int j = start; j < numberOfRows; j++) {
bitSet.set(dimensionColumnPage.getInvertedIndex(j));
last++;
}
startIndex = last;
- if (startIndex >= numerOfRows) {
+ if (startIndex >= numberOfRows) {
break;
}
}
@@ -521,15 +521,15 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
* Method will scan the block and finds the range start index from which all
* members will be considered for applying range filters. this method will
* be called if the column is sorted default so column index
- * mapping will be present for accesing the members from the block.
+ * mapping will be present for accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSet(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSet(DimensionColumnPage dimensionColumnPage,
+ int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
byte[][] filterValues = this.filterRangeValues;
// binary search can only be applied if column is sorted
if (isNaturalSorted && dimensionColumnPage.isExplicitSorted()) {
@@ -539,10 +539,10 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
for (int k = 0; k < filterValues.length; k++) {
start = CarbonUtil
.getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex,
- numerOfRows - 1, filterValues[k], false);
+ numberOfRows - 1, filterValues[k], false);
if (start < 0) {
start = -(start + 1);
- if (start == numerOfRows) {
+ if (start == numberOfRows) {
start = start - 1;
}
// Method will compare the tentative index value after binary search, this tentative
@@ -554,18 +554,18 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
}
last = start;
- for (int j = start; j < numerOfRows; j++) {
+ for (int j = start; j < numberOfRows; j++) {
bitSet.set(j);
last++;
}
startIndex = last;
- if (startIndex >= numerOfRows) {
+ if (startIndex >= numberOfRows) {
break;
}
}
} else {
for (int k = 0; k < filterValues.length; k++) {
- for (int i = 0; i < numerOfRows; i++) {
+ for (int i = 0; i < numberOfRows; i++) {
if (ByteUtil.compare(dimensionColumnPage.getChunkData(i), filterValues[k]) >= 0) {
bitSet.set(i);
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGreaterThanFilterExecutorImpl.java
similarity index 91%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGreaterThanFilterExecutorImpl.java
index 7cef5f4..e06414e 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGreaterThanFilterExecutorImpl.java
@@ -46,22 +46,22 @@ import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.core.util.comparator.Comparator;
import org.apache.carbondata.core.util.comparator.SerializableComparator;
-public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecuterImpl {
+public class RowLevelRangeGreaterThanFilterExecutorImpl extends RowLevelFilterExecutorImpl {
private byte[][] filterRangeValues;
private Object[] msrFilterRangeValues;
private SerializableComparator comparator;
-
/**
* flag to check whether default values is present in the filter value list
*/
private boolean isDefaultValuePresentInFilter;
- RowLevelRangeGrtThanFiterExecuterImpl(List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
- List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
+ RowLevelRangeGreaterThanFilterExecutorImpl(
+ List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
+ List<MeasureColumnResolvedFilterInfo> msrColEvoluatorInfoList, Expression exp,
AbsoluteTableIdentifier tableIdentifier, byte[][] filterRangeValues,
Object[] msrFilterRangeValues, SegmentProperties segmentProperties) {
- super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
+ super(dimColEvaluatorInfoList, msrColEvoluatorInfoList, exp, tableIdentifier, segmentProperties,
null);
this.filterRangeValues = filterRangeValues;
this.msrFilterRangeValues = msrFilterRangeValues;
@@ -216,7 +216,7 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
boolean isScanRequired = false;
for (int k = 0; k < filterValues.length; k++) {
// filter value should be in range of max and min value i.e
- // max>filtervalue>min
+ // max>filterValue>min
// so filter-max should be negative
int maxCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMaxValue);
// if any filter value is in range than this block needs to be
@@ -239,7 +239,7 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
return true;
}
// filter value should be in range of max and min value i.e
- // max>filtervalue>min
+ // max>filterValue>min
// so filter-max should be negative
Object data =
DataTypeUtil.getDataBasedOnDataTypeForNoDictionaryColumn(filterValues[k], dataType);
@@ -291,7 +291,7 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
DimensionRawColumnChunk rawColumnChunk =
rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex];
BitSetGroup bitSetGroup = new BitSetGroup(rawColumnChunk.getPagesCount());
- FilterExecuter filterExecuter = null;
+ FilterExecutor filterExecutor = null;
boolean isExclude = false;
for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
if (rawColumnChunk.getMaxValues() != null) {
@@ -306,20 +306,20 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
BitSet bitSet = null;
DimensionColumnPage dimensionColumnPage = rawColumnChunk.decodeColumnPage(i);
if (null != rawColumnChunk.getLocalDictionary()) {
- if (null == filterExecuter) {
- filterExecuter = FilterUtil
+ if (null == filterExecutor) {
+ filterExecutor = FilterUtil
.getFilterExecutorForRangeFilters(rawColumnChunk, exp, isNaturalSorted);
- if (filterExecuter instanceof ExcludeFilterExecuterImpl) {
+ if (filterExecutor instanceof ExcludeFilterExecutorImpl) {
isExclude = true;
}
}
if (!isExclude) {
- bitSet = ((IncludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((IncludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
rawBlockletColumnChunks.getBitSetGroup(), i);
} else {
- bitSet = ((ExcludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((ExcludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
rawBlockletColumnChunks.getBitSetGroup(), i);
@@ -413,8 +413,8 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
}
private BitSet getFilteredIndexesForMeasures(ColumnPage columnPage,
- int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
Object[] filterValues = this.msrFilterRangeValues;
DataType msrType = msrColEvalutorInfoList.get(0).getType();
SerializableComparator comparator = Comparator.getComparatorByDataTypeForMeasure(msrType);
@@ -426,7 +426,7 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
}
continue;
}
- for (int startIndex = 0; startIndex < numerOfRows; startIndex++) {
+ for (int startIndex = 0; startIndex < numberOfRows; startIndex++) {
if (!nullBitSet.get(startIndex)) {
Object msrValue = DataTypeUtil
.getMeasureObjectBasedOnDataType(columnPage, startIndex,
@@ -443,12 +443,12 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
}
private BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
+ int numberOfRows) {
BitSet bitSet = null;
if (dimensionColumnPage.isExplicitSorted()) {
- bitSet = setFilterdIndexToBitSetWithColumnIndex(dimensionColumnPage, numerOfRows);
+ bitSet = setFilteredIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows);
} else {
- bitSet = setFilterdIndexToBitSet(dimensionColumnPage, numerOfRows);
+ bitSet = setFilteredIndexToBitSet(dimensionColumnPage, numberOfRows);
}
byte[] defaultValue = null;
if (dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.STRING) {
@@ -459,7 +459,7 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
} else if (!dimensionColumnPage.isAdaptiveEncoded()) {
defaultValue = CarbonCommonConstants.EMPTY_BYTE_ARRAY;
}
- if (dimensionColumnPage.isNoDicitionaryColumn() ||
+ if (dimensionColumnPage.isNoDictionaryColumn() ||
dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.DATE) {
FilterUtil.removeNullValues(dimensionColumnPage, bitSet, defaultValue);
}
@@ -470,27 +470,27 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
* Method will scan the block and finds the range start index from which all members
* will be considered for applying range filters. this method will be called if the
* column is not supported by default so column index mapping will be present for
- * accesing the members from the block.
+ * accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSetWithColumnIndex(
- DimensionColumnPage dimensionColumnPage, int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSetWithColumnIndex(
+ DimensionColumnPage dimensionColumnPage, int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
int start = 0;
int last = 0;
int startIndex = 0;
byte[][] filterValues = this.filterRangeValues;
for (int i = 0; i < filterValues.length; i++) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[i], true);
if (start >= 0) {
start = CarbonUtil
.nextGreaterValueToTarget(start, dimensionColumnPage, filterValues[i],
- numerOfRows);
+ numberOfRows);
}
// Logic will handle the case where the range filter member is not present in block
// in this case the binary search will return the index from where the bit sets will be
@@ -498,7 +498,7 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
// from the next element which is greater than filter member.
if (start < 0) {
start = -(start + 1);
- if (start == numerOfRows) {
+ if (start == numberOfRows) {
start = start - 1;
}
// Method will compare the tentative index value after binary search, this tentative
@@ -512,12 +512,12 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
}
last = start;
- for (int j = start; j < numerOfRows; j++) {
+ for (int j = start; j < numberOfRows; j++) {
bitSet.set(dimensionColumnPage.getInvertedIndex(j));
last++;
}
startIndex = last;
- if (startIndex >= numerOfRows) {
+ if (startIndex >= numberOfRows) {
break;
}
}
@@ -529,15 +529,15 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
* Method will scan the block and finds the range start index from which all
* members will be considered for applying range filters. this method will
* be called if the column is sorted default so column index
- * mapping will be present for accesing the members from the block.
+ * mapping will be present for accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSet(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSet(DimensionColumnPage dimensionColumnPage,
+ int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
byte[][] filterValues = this.filterRangeValues;
// binary search can only be applied if column is sorted
if (isNaturalSorted && dimensionColumnPage.isExplicitSorted()) {
@@ -547,15 +547,15 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
for (int k = 0; k < filterValues.length; k++) {
start = CarbonUtil
.getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex,
- numerOfRows - 1, filterValues[k], true);
+ numberOfRows - 1, filterValues[k], true);
if (start >= 0) {
start = CarbonUtil
.nextGreaterValueToTarget(start, dimensionColumnPage, filterValues[k],
- numerOfRows);
+ numberOfRows);
}
if (start < 0) {
start = -(start + 1);
- if (start == numerOfRows) {
+ if (start == numberOfRows) {
start = start - 1;
}
// Method will compare the tentative index value after binary search, this tentative
@@ -566,18 +566,18 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
}
}
last = start;
- for (int j = start; j < numerOfRows; j++) {
+ for (int j = start; j < numberOfRows; j++) {
bitSet.set(j);
last++;
}
startIndex = last;
- if (startIndex >= numerOfRows) {
+ if (startIndex >= numberOfRows) {
break;
}
}
} else {
for (int k = 0; k < filterValues.length; k++) {
- for (int i = 0; i < numerOfRows; i++) {
+ for (int i = 0; i < numberOfRows; i++) {
if (ByteUtil.compare(dimensionColumnPage.getChunkData(i), filterValues[k]) > 0) {
bitSet.set(i);
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecutorImpl.java
similarity index 92%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecutorImpl.java
index 23ff13a..5dd50a0 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecutorImpl.java
@@ -46,7 +46,7 @@ import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.core.util.comparator.Comparator;
import org.apache.carbondata.core.util.comparator.SerializableComparator;
-public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilterExecuterImpl {
+public class RowLevelRangeLessThanEqualFilterExecutorImpl extends RowLevelFilterExecutorImpl {
protected byte[][] filterRangeValues;
protected Object[] msrFilterRangeValues;
protected SerializableComparator comparator;
@@ -55,7 +55,7 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
* flag to check whether default values is present in the filter value list
*/
private boolean isDefaultValuePresentInFilter;
- public RowLevelRangeLessThanEqualFilterExecuterImpl(
+ public RowLevelRangeLessThanEqualFilterExecutorImpl(
List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
AbsoluteTableIdentifier tableIdentifier, byte[][] filterRangeValues,
@@ -160,7 +160,7 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
int minCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMinValue);
// if any filter applied is not in range of min and max of block
- // then since its a less than equal to fiter validate whether the block
+ // then since its a less than equal to filter validate whether the block
// min range is less than equal to applied filter member
if (minCompare >= 0) {
isScanRequired = true;
@@ -180,7 +180,7 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
return true;
}
// filter value should be in range of max and min value i.e
- // max>filtervalue>min
+ // max>filterValue>min
// so filter-max should be negative
Object data =
DataTypeUtil.getDataBasedOnDataTypeForNoDictionaryColumn(filterValues[k], dataType);
@@ -233,7 +233,7 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
DimensionRawColumnChunk rawColumnChunk =
rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex];
BitSetGroup bitSetGroup = new BitSetGroup(rawColumnChunk.getPagesCount());
- FilterExecuter filterExecuter = null;
+ FilterExecutor filterExecutor = null;
boolean isExclude = false;
for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
if (rawColumnChunk.getMinValues() != null) {
@@ -241,20 +241,20 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
BitSet bitSet;
DimensionColumnPage dimensionColumnPage = rawColumnChunk.decodeColumnPage(i);
if (null != rawColumnChunk.getLocalDictionary()) {
- if (null == filterExecuter) {
- filterExecuter = FilterUtil
+ if (null == filterExecutor) {
+ filterExecutor = FilterUtil
.getFilterExecutorForRangeFilters(rawColumnChunk, exp, isNaturalSorted);
- if (filterExecuter instanceof ExcludeFilterExecuterImpl) {
+ if (filterExecutor instanceof ExcludeFilterExecutorImpl) {
isExclude = true;
}
}
if (!isExclude) {
- bitSet = ((IncludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((IncludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
rawBlockletColumnChunks.getBitSetGroup(), i);
} else {
- bitSet = ((ExcludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((ExcludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
rawBlockletColumnChunks.getBitSetGroup(), i);
@@ -394,8 +394,8 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
}
private BitSet getFilteredIndexesForMeasures(ColumnPage columnPage,
- int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
Object[] filterValues = this.msrFilterRangeValues;
DataType msrType = msrColEvalutorInfoList.get(0).getType();
SerializableComparator comparator = Comparator.getComparatorByDataTypeForMeasure(msrType);
@@ -407,7 +407,7 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
}
continue;
}
- for (int startIndex = 0; startIndex < numerOfRows; startIndex++) {
+ for (int startIndex = 0; startIndex < numberOfRows; startIndex++) {
if (!nullBitSet.get(startIndex)) {
Object msrValue = DataTypeUtil
.getMeasureObjectBasedOnDataType(columnPage, startIndex,
@@ -424,7 +424,7 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
}
private BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
+ int numberOfRows) {
byte[] defaultValue = null;
if (dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.DATE) {
defaultValue = FilterUtil
@@ -434,16 +434,16 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
}
BitSet bitSet = null;
if (dimensionColumnPage.isExplicitSorted()) {
- bitSet = setFilterdIndexToBitSetWithColumnIndex(dimensionColumnPage, numerOfRows,
+ bitSet = setFilteredIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows,
dimensionColumnPage.isAdaptiveEncoded() ? null : defaultValue);
} else {
- bitSet = setFilterdIndexToBitSet(dimensionColumnPage, numerOfRows,
+ bitSet = setFilteredIndexToBitSet(dimensionColumnPage, numberOfRows,
dimensionColumnPage.isAdaptiveEncoded() ? null : defaultValue);
}
if (dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.STRING) {
defaultValue = CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY;
}
- if (dimensionColumnPage.isNoDicitionaryColumn() ||
+ if (dimensionColumnPage.isNoDictionaryColumn() ||
dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.DATE) {
FilterUtil.removeNullValues(dimensionColumnPage, bitSet, defaultValue);
}
@@ -454,16 +454,16 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
* Method will scan the block and finds the range start index from which all members
* will be considered for applying range filters. this method will be called if the
* column is not supported by default so column index mapping will be present for
- * accesing the members from the block.
+ * accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSetWithColumnIndex(
- DimensionColumnPage dimensionColumnPage, int numerOfRows,
+ private BitSet setFilteredIndexToBitSetWithColumnIndex(
+ DimensionColumnPage dimensionColumnPage, int numberOfRows,
byte[] defaultValue) {
- BitSet bitSet = new BitSet(numerOfRows);
+ BitSet bitSet = new BitSet(numberOfRows);
int start = 0;
int last = 0;
int skip = 0;
@@ -472,12 +472,12 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
//find the number of default values to skip the null value in case of direct dictionary
if (null != defaultValue) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
defaultValue, true);
if (start < 0) {
skip = -(start + 1);
// end of block
- if (skip == numerOfRows) {
+ if (skip == numberOfRows) {
return bitSet;
}
} else {
@@ -489,11 +489,11 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
}
for (int i = 0; i < filterValues.length; i++) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[i], true);
if (start < 0) {
start = -(start + 1);
- if (start >= numerOfRows) {
+ if (start >= numberOfRows) {
start = start - 1;
}
// When negative value of start is returned from getFirstIndexUsingBinarySearch the Start
@@ -522,16 +522,16 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
* Method will scan the block and finds the range start index from which all
* members will be considered for applying range filters. this method will
* be called if the column is sorted default so column index
- * mapping will be present for accesing the members from the block.
+ * mapping will be present for accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @param defaultValue
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSet(DimensionColumnPage dimensionColumnPage,
- int numerOfRows, byte[] defaultValue) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSet(DimensionColumnPage dimensionColumnPage,
+ int numberOfRows, byte[] defaultValue) {
+ BitSet bitSet = new BitSet(numberOfRows);
byte[][] filterValues = this.filterRangeValues;
// binary search can only be applied if column is sorted
if (isNaturalSorted && dimensionColumnPage.isExplicitSorted()) {
@@ -543,11 +543,11 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
if (null != defaultValue) {
start = CarbonUtil
.getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex,
- numerOfRows - 1, defaultValue, true);
+ numberOfRows - 1, defaultValue, true);
if (start < 0) {
skip = -(start + 1);
// end of block
- if (skip == numerOfRows) {
+ if (skip == numberOfRows) {
return bitSet;
}
} else {
@@ -560,10 +560,10 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
for (int k = 0; k < filterValues.length; k++) {
start = CarbonUtil
.getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex,
- numerOfRows - 1, filterValues[k], true);
+ numberOfRows - 1, filterValues[k], true);
if (start < 0) {
start = -(start + 1);
- if (start >= numerOfRows) {
+ if (start >= numberOfRows) {
start = start - 1;
}
// When negative value of start is returned from getFirstIndexUsingBinarySearch the Start
@@ -585,7 +585,7 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
}
} else {
for (int k = 0; k < filterValues.length; k++) {
- for (int i = 0; i < numerOfRows; i++) {
+ for (int i = 0; i < numberOfRows; i++) {
if (ByteUtil.compare(dimensionColumnPage.getChunkData(i), filterValues[k]) <= 0) {
bitSet.set(i);
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecutorImpl.java
similarity index 92%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecutorImpl.java
index 054f04d..0c29675 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecutorImpl.java
@@ -46,7 +46,7 @@ import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.core.util.comparator.Comparator;
import org.apache.carbondata.core.util.comparator.SerializableComparator;
-public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecuterImpl {
+public class RowLevelRangeLessThanFilterExecutorImpl extends RowLevelFilterExecutorImpl {
private byte[][] filterRangeValues;
private Object[] msrFilterRangeValues;
private SerializableComparator comparator;
@@ -55,16 +55,16 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
* flag to check whether default values is present in the filter value list
*/
private boolean isDefaultValuePresentInFilter;
- public RowLevelRangeLessThanFilterExecuterImpl(
+ public RowLevelRangeLessThanFilterExecutorImpl(
List<DimColumnResolvedFilterInfo> dimColEvaluatorInfoList,
- List<MeasureColumnResolvedFilterInfo> msrColEvalutorInfoList, Expression exp,
+ List<MeasureColumnResolvedFilterInfo> msrColEvaluatorInfoList, Expression exp,
AbsoluteTableIdentifier tableIdentifier, byte[][] filterRangeValues,
Object[] msrFilterRangeValues, SegmentProperties segmentProperties) {
- super(dimColEvaluatorInfoList, msrColEvalutorInfoList, exp, tableIdentifier, segmentProperties,
+ super(dimColEvaluatorInfoList, msrColEvaluatorInfoList, exp, tableIdentifier, segmentProperties,
null);
this.filterRangeValues = filterRangeValues;
this.msrFilterRangeValues = msrFilterRangeValues;
- if (!msrColEvalutorInfoList.isEmpty()) {
+ if (!msrColEvaluatorInfoList.isEmpty()) {
CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
comparator = Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
}
@@ -160,7 +160,7 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
int minCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], blockMinValue);
// if any filter applied is not in range of min and max of block
- // then since its a less than equal to fiter validate whether the block
+ // then since its a less than equal to filter validate whether the block
// min range is less than equal to applied filter member
if (minCompare > 0) {
isScanRequired = true;
@@ -230,7 +230,7 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
DimensionRawColumnChunk rawColumnChunk =
rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex];
BitSetGroup bitSetGroup = new BitSetGroup(rawColumnChunk.getPagesCount());
- FilterExecuter filterExecuter = null;
+ FilterExecutor filterExecutor = null;
boolean isExclude = false;
for (int i = 0; i < rawColumnChunk.getPagesCount(); i++) {
if (rawColumnChunk.getMinValues() != null) {
@@ -238,20 +238,20 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
BitSet bitSet;
DimensionColumnPage dimensionColumnPage = rawColumnChunk.decodeColumnPage(i);
if (null != rawColumnChunk.getLocalDictionary()) {
- if (null == filterExecuter) {
- filterExecuter = FilterUtil
+ if (null == filterExecutor) {
+ filterExecutor = FilterUtil
.getFilterExecutorForRangeFilters(rawColumnChunk, exp, isNaturalSorted);
- if (filterExecuter instanceof ExcludeFilterExecuterImpl) {
+ if (filterExecutor instanceof ExcludeFilterExecutorImpl) {
isExclude = true;
}
}
if (!isExclude) {
- bitSet = ((IncludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((IncludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
rawBlockletColumnChunks.getBitSetGroup(), i);
} else {
- bitSet = ((ExcludeFilterExecuterImpl) filterExecuter)
+ bitSet = ((ExcludeFilterExecutorImpl) filterExecutor)
.getFilteredIndexes(dimensionColumnPage,
rawColumnChunk.getRowCount()[i], useBitsetPipeLine,
rawBlockletColumnChunks.getBitSetGroup(), i);
@@ -389,8 +389,8 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
}
private BitSet getFilteredIndexesForMeasures(ColumnPage columnPage,
- int numerOfRows) {
- BitSet bitSet = new BitSet(numerOfRows);
+ int numberOfRows) {
+ BitSet bitSet = new BitSet(numberOfRows);
Object[] filterValues = this.msrFilterRangeValues;
DataType msrType = msrColEvalutorInfoList.get(0).getType();
SerializableComparator comparator = Comparator.getComparatorByDataTypeForMeasure(msrType);
@@ -402,7 +402,7 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
}
continue;
}
- for (int startIndex = 0; startIndex < numerOfRows; startIndex++) {
+ for (int startIndex = 0; startIndex < numberOfRows; startIndex++) {
if (!nullBitSet.get(startIndex)) {
Object msrValue = DataTypeUtil
.getMeasureObjectBasedOnDataType(columnPage, startIndex,
@@ -419,7 +419,7 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
}
private BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
- int numerOfRows) {
+ int numberOfRows) {
byte[] defaultValue = null;
if (dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.DATE) {
defaultValue = FilterUtil
@@ -429,16 +429,16 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
}
BitSet bitSet = null;
if (dimensionColumnPage.isExplicitSorted()) {
- bitSet = setFilterdIndexToBitSetWithColumnIndex(dimensionColumnPage, numerOfRows,
+ bitSet = setFilteredIndexToBitSetWithColumnIndex(dimensionColumnPage, numberOfRows,
dimensionColumnPage.isAdaptiveEncoded() ? null : defaultValue);
} else {
- bitSet = setFilterdIndexToBitSet(dimensionColumnPage, numerOfRows,
+ bitSet = setFilteredIndexToBitSet(dimensionColumnPage, numberOfRows,
dimensionColumnPage.isAdaptiveEncoded() ? null : defaultValue);
}
if (dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.STRING) {
defaultValue = CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY;
}
- if (dimensionColumnPage.isNoDicitionaryColumn() ||
+ if (dimensionColumnPage.isNoDictionaryColumn() ||
dimColEvaluatorInfoList.get(0).getDimension().getDataType() == DataTypes.DATE) {
FilterUtil.removeNullValues(dimensionColumnPage, bitSet, defaultValue);
}
@@ -449,16 +449,16 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
* Method will scan the block and finds the range start index from which all members
* will be considered for applying range filters. this method will be called if the
* column is not supported by default so column index mapping will be present for
- * accesing the members from the block.
+ * accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSetWithColumnIndex(
- DimensionColumnPage dimensionColumnPage, int numerOfRows,
+ private BitSet setFilteredIndexToBitSetWithColumnIndex(
+ DimensionColumnPage dimensionColumnPage, int numberOfRows,
byte[] defaultValue) {
- BitSet bitSet = new BitSet(numerOfRows);
+ BitSet bitSet = new BitSet(numberOfRows);
int start = 0;
int last = 0;
int startIndex = 0;
@@ -468,12 +468,12 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
//find the number of default values to skip the null value in case of direct dictionary
if (null != defaultValue) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
defaultValue, true);
if (start < 0) {
skip = -(start + 1);
// end of block
- if (skip == numerOfRows) {
+ if (skip == numberOfRows) {
return bitSet;
}
} else {
@@ -486,7 +486,7 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
for (int i = 0; i < filterValues.length; i++) {
start = CarbonUtil
- .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numerOfRows - 1,
+ .getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex, numberOfRows - 1,
filterValues[i], false);
if (start >= 0) {
// Logic will handle the case where the range filter member is not present in block
@@ -498,7 +498,7 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
}
if (start < 0) {
start = -(start + 1);
- if (start >= numerOfRows) {
+ if (start >= numberOfRows) {
start = start - 1;
}
// When negative value of start is returned from getFirstIndexUsingBinarySearch the Start
@@ -527,15 +527,15 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
* Method will scan the block and finds the range start index from which all
* members will be considered for applying range filters. this method will
* be called if the column is sorted default so column index
- * mapping will be present for accesing the members from the block.
+ * mapping will be present for accessing the members from the block.
*
* @param dimensionColumnPage
- * @param numerOfRows
+ * @param numberOfRows
* @return BitSet.
*/
- private BitSet setFilterdIndexToBitSet(DimensionColumnPage dimensionColumnPage,
- int numerOfRows, byte[] defaultValue) {
- BitSet bitSet = new BitSet(numerOfRows);
+ private BitSet setFilteredIndexToBitSet(DimensionColumnPage dimensionColumnPage,
+ int numberOfRows, byte[] defaultValue) {
+ BitSet bitSet = new BitSet(numberOfRows);
byte[][] filterValues = this.filterRangeValues;
// binary search can only be applied if column is sorted
if (isNaturalSorted && dimensionColumnPage.isExplicitSorted()) {
@@ -547,11 +547,11 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
if (null != defaultValue) {
start = CarbonUtil
.getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex,
- numerOfRows - 1, defaultValue, true);
+ numberOfRows - 1, defaultValue, true);
if (start < 0) {
skip = -(start + 1);
// end of block
- if (skip == numerOfRows) {
+ if (skip == numberOfRows) {
return bitSet;
}
} else {
@@ -564,7 +564,7 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
for (int k = 0; k < filterValues.length; k++) {
start = CarbonUtil
.getFirstIndexUsingBinarySearch(dimensionColumnPage, startIndex,
- numerOfRows - 1, filterValues[k], false);
+ numberOfRows - 1, filterValues[k], false);
if (start >= 0) {
start =
CarbonUtil.nextLesserValueToTarget(start, dimensionColumnPage, filterValues[k]);
@@ -572,8 +572,8 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
if (start < 0) {
start = -(start + 1);
- if (start >= numerOfRows) {
- start = numerOfRows - 1;
+ if (start >= numberOfRows) {
+ start = numberOfRows - 1;
}
// When negative value of start is returned from getFirstIndexUsingBinarySearch the Start
// will be pointing to the next consecutive position. So compare it again and point to the
@@ -594,7 +594,7 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
}
} else {
for (int k = 0; k < filterValues.length; k++) {
- for (int i = 0; i < numerOfRows; i++) {
+ for (int i = 0; i < numberOfRows; i++) {
if (ByteUtil.compare(dimensionColumnPage.getChunkData(i), filterValues[k]) < 0) {
bitSet.set(i);
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeTypeExecuterFactory.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeTypeExecutorFactory.java
similarity index 83%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeTypeExecuterFactory.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeTypeExecutorFactory.java
index da9da0a..c872349 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeTypeExecuterFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeTypeExecutorFactory.java
@@ -18,35 +18,35 @@
package org.apache.carbondata.core.scan.filter.executer;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.scan.filter.resolver.RowLevelRangeFilterResolverImpl;
-public class RowLevelRangeTypeExecuterFactory {
+public class RowLevelRangeTypeExecutorFactory {
- private RowLevelRangeTypeExecuterFactory() {
+ private RowLevelRangeTypeExecutorFactory() {
}
/**
- * The method returns the Row Level Range fiter type instance based on
+ * The method returns the Row Level Range filter type instance based on
* filter tree resolver type.
*
* @param filterExpressionResolverTree
* @param segmentProperties
* @return the generator instance
*/
- public static RowLevelFilterExecuterImpl getRowLevelRangeTypeExecuter(
- FilterExecuterType filterExecuterType, FilterResolverIntf filterExpressionResolverTree,
+ public static RowLevelFilterExecutorImpl getRowLevelRangeTypeExecutor(
+ FilterExecutorType filterExecutorType, FilterResolverIntf filterExpressionResolverTree,
SegmentProperties segmentProperties) {
- switch (filterExecuterType) {
+ switch (filterExecutorType) {
case ROWLEVEL_LESSTHAN:
- return new RowLevelRangeLessThanFilterExecuterImpl(
+ return new RowLevelRangeLessThanFilterExecutorImpl(
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
.getDimColEvaluatorInfoList(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
- .getMsrColEvalutorInfoList(),
+ .getMsrColEvaluatorInfoList(),
filterExpressionResolverTree.getFilterExpression(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree).getTableIdentifier(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
@@ -54,11 +54,11 @@ public class RowLevelRangeTypeExecuterFactory {
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
.getMeasureFilterRangeValues(), segmentProperties);
case ROWLEVEL_LESSTHAN_EQUALTO:
- return new RowLevelRangeLessThanEqualFilterExecuterImpl(
+ return new RowLevelRangeLessThanEqualFilterExecutorImpl(
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
.getDimColEvaluatorInfoList(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
- .getMsrColEvalutorInfoList(),
+ .getMsrColEvaluatorInfoList(),
filterExpressionResolverTree.getFilterExpression(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree).getTableIdentifier(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
@@ -66,11 +66,11 @@ public class RowLevelRangeTypeExecuterFactory {
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
.getMeasureFilterRangeValues(), segmentProperties);
case ROWLEVEL_GREATERTHAN_EQUALTO:
- return new RowLevelRangeGrtrThanEquaToFilterExecuterImpl(
+ return new RowLevelRangeGreaterThanEqualFilterExecutorImpl(
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
.getDimColEvaluatorInfoList(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
- .getMsrColEvalutorInfoList(),
+ .getMsrColEvaluatorInfoList(),
filterExpressionResolverTree.getFilterExpression(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree).getTableIdentifier(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
@@ -78,11 +78,11 @@ public class RowLevelRangeTypeExecuterFactory {
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
.getMeasureFilterRangeValues(), segmentProperties);
case ROWLEVEL_GREATERTHAN:
- return new RowLevelRangeGrtThanFiterExecuterImpl(
+ return new RowLevelRangeGreaterThanFilterExecutorImpl(
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
.getDimColEvaluatorInfoList(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
- .getMsrColEvalutorInfoList(),
+ .getMsrColEvaluatorInfoList(),
filterExpressionResolverTree.getFilterExpression(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree).getTableIdentifier(),
((RowLevelRangeFilterResolverImpl) filterExpressionResolverTree)
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java
index cde65bb..7014d29 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/TrueFilterExecutor.java
@@ -23,7 +23,7 @@ import org.apache.carbondata.core.scan.filter.intf.RowIntf;
import org.apache.carbondata.core.scan.processor.RawBlockletColumnChunks;
import org.apache.carbondata.core.util.BitSetGroup;
-public class TrueFilterExecutor implements FilterExecuter {
+public class TrueFilterExecutor implements FilterExecutor {
/**
* API will apply filter based on resolver instance
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/intf/FilterExecuterType.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/intf/FilterExecutorType.java
similarity index 94%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/intf/FilterExecuterType.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/intf/FilterExecutorType.java
index 58d95c1..b101aa2 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/intf/FilterExecuterType.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/intf/FilterExecutorType.java
@@ -19,7 +19,7 @@ package org.apache.carbondata.core.scan.filter.intf;
import java.io.Serializable;
-public enum FilterExecuterType implements Serializable {
+public enum FilterExecutorType implements Serializable {
INCLUDE, EXCLUDE, OR, AND, RESTRUCTURE, ROWLEVEL, RANGE, ROWLEVEL_GREATERTHAN,
ROWLEVEL_GREATERTHAN_EQUALTO, ROWLEVEL_LESSTHAN_EQUALTO, ROWLEVEL_LESSTHAN, TRUE, FALSE
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/optimizer/RangeFilterOptmizer.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/optimizer/RangeFilterOptimizer.java
similarity index 92%
rename from core/src/main/java/org/apache/carbondata/core/scan/filter/optimizer/RangeFilterOptmizer.java
rename to core/src/main/java/org/apache/carbondata/core/scan/filter/optimizer/RangeFilterOptimizer.java
index 78f04b5..a10297a 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/optimizer/RangeFilterOptmizer.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/optimizer/RangeFilterOptimizer.java
@@ -21,11 +21,11 @@ import org.apache.carbondata.core.scan.expression.Expression;
import org.apache.carbondata.core.scan.expression.RangeExpressionEvaluator;
import org.apache.carbondata.core.scan.filter.intf.FilterOptimizer;
-public class RangeFilterOptmizer implements FilterOptimizer {
+public class RangeFilterOptimizer implements FilterOptimizer {
RangeExpressionEvaluator rangeExpEvaluator;
- public RangeFilterOptmizer(Expression filterExpression) {
+ public RangeFilterOptimizer(Expression filterExpression) {
this.rangeExpEvaluator = new RangeExpressionEvaluator(filterExpression);
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
index b7ee46f..bc65114 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
@@ -28,7 +28,7 @@ import org.apache.carbondata.core.scan.expression.conditional.ConditionalExpress
import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
import org.apache.carbondata.core.scan.expression.logical.RangeExpression;
import org.apache.carbondata.core.scan.filter.FilterUtil;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.resolver.metadata.FilterResolverMetadata;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
@@ -59,7 +59,7 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
/**
* This API will resolve the filter expression and generates the
* dictionaries for executing/evaluating the filter expressions in the
- * executer layer.
+ * executor layer.
*
* @throws FilterUnsupportedException
*/
@@ -89,7 +89,7 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
// dimColResolvedFilterInfo
//visitable object with filter member values based on the visitor type, currently there
//3 types of visitors custom,direct and no dictionary, all types of visitor populate
- //the visitable instance as per its buisness logic which is different for all the
+ //the visitable instance as per its business logic which is different for all the
// visitors.
if (columnExpression.isMeasure()) {
msrColResolvedFilterInfo.setMeasure(columnExpression.getMeasure());
@@ -160,7 +160,7 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
}
/**
- * Left node will not be presentin this scenario
+ * Left node will not be present in this scenario
*
* @return left node of type FilterResolverIntf instance
*/
@@ -170,7 +170,7 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
}
/**
- * Right node will not be presentin this scenario
+ * Right node will not be present in this scenario
*
* @return left node of type FilterResolverIntf instance
*/
@@ -203,21 +203,21 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
}
/**
- * Method will return the executer type for particular conditional resolver
- * basically two types of executers will be formed for the conditional query.
+ * Method will return the executor type for particular conditional resolver
+ * basically two types of executors will be formed for the conditional query.
*
- * @return the filter executer type
+ * @return the filter executor type
*/
@Override
- public FilterExecuterType getFilterExecuterType() {
+ public FilterExecutorType getFilterExecutorType() {
switch (exp.getFilterExpressionType()) {
case NOT_EQUALS:
case NOT_IN:
- return FilterExecuterType.EXCLUDE;
+ return FilterExecutorType.EXCLUDE;
case RANGE:
- return FilterExecuterType.RANGE;
+ return FilterExecutorType.RANGE;
default:
- return FilterExecuterType.INCLUDE;
+ return FilterExecutorType.INCLUDE;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java
index 57123b8..af33330 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/FilterResolverIntf.java
@@ -21,7 +21,7 @@ import java.io.Serializable;
import org.apache.carbondata.core.scan.expression.Expression;
import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
@@ -30,7 +30,7 @@ public interface FilterResolverIntf extends Serializable {
/**
* This API will resolve the filter expression and generates the
* dictionaries for executing/evaluating the filter expressions in the
- * executer layer.
+ * executor layer.
*
* @throws FilterUnsupportedException
*/
@@ -74,7 +74,7 @@ public interface FilterResolverIntf extends Serializable {
*
* @return FilterExecuterType.
*/
- FilterExecuterType getFilterExecuterType();
+ FilterExecutorType getFilterExecutorType();
Expression getFilterExpression();
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java
index c8f4106..e838862 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/LogicalFilterResolverImpl.java
@@ -20,7 +20,7 @@ package org.apache.carbondata.core.scan.filter.resolver;
import org.apache.carbondata.core.scan.expression.BinaryExpression;
import org.apache.carbondata.core.scan.expression.Expression;
import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
@@ -30,24 +30,24 @@ public class LogicalFilterResolverImpl implements FilterResolverIntf {
*/
private static final long serialVersionUID = 5734382980564402914L;
- protected FilterResolverIntf leftEvalutor;
+ protected FilterResolverIntf leftEvaluator;
- protected FilterResolverIntf rightEvalutor;
+ protected FilterResolverIntf rightEvaluator;
protected ExpressionType filterExpressionType;
private BinaryExpression filterExpression;
- public LogicalFilterResolverImpl(FilterResolverIntf leftEvalutor,
- FilterResolverIntf rightEvalutor, BinaryExpression currentExpression) {
- this.leftEvalutor = leftEvalutor;
- this.rightEvalutor = rightEvalutor;
+ public LogicalFilterResolverImpl(FilterResolverIntf leftEvaluator,
+ FilterResolverIntf rightEvaluator, BinaryExpression currentExpression) {
+ this.leftEvaluator = leftEvaluator;
+ this.rightEvaluator = rightEvaluator;
this.filterExpressionType = currentExpression.getFilterExpressionType();
this.filterExpression = currentExpression;
}
/**
- * Logical filter resolver will return the left and right filter expresison
+ * Logical filter resolver will return the left and right filter expression
* node for filter evaluation, so in this instance no implementation is required.
*
*/
@@ -63,7 +63,7 @@ public class LogicalFilterResolverImpl implements FilterResolverIntf {
* @return FilterResolverIntf.
*/
public FilterResolverIntf getLeft() {
- return leftEvalutor;
+ return leftEvaluator;
}
/**
@@ -73,7 +73,7 @@ public class LogicalFilterResolverImpl implements FilterResolverIntf {
* @return FilterResolverIntf.
*/
public FilterResolverIntf getRight() {
- return rightEvalutor;
+ return rightEvaluator;
}
@Override
@@ -87,12 +87,12 @@ public class LogicalFilterResolverImpl implements FilterResolverIntf {
}
@Override
- public FilterExecuterType getFilterExecuterType() {
+ public FilterExecutorType getFilterExecutorType() {
switch (filterExpressionType) {
case OR:
- return FilterExecuterType.OR;
+ return FilterExecutorType.OR;
case AND:
- return FilterExecuterType.AND;
+ return FilterExecutorType.AND;
default:
return null;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java
index d2ae50f..ded0385 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelFilterResolverImpl.java
@@ -25,7 +25,7 @@ import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.scan.expression.ColumnExpression;
import org.apache.carbondata.core.scan.expression.Expression;
import org.apache.carbondata.core.scan.expression.conditional.ConditionalExpression;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
@@ -64,7 +64,7 @@ public class RowLevelFilterResolverImpl extends ConditionalFilterResolverImpl {
dimColumnEvaluatorInfo.setColumnIndex(columnExpression.getCarbonColumn().getOrdinal());
dimColumnEvaluatorInfo.setRowIndex(index++);
dimColumnEvaluatorInfo.setDimension(columnExpression.getDimension());
- dimColumnEvaluatorInfo.setDimensionExistsInCurrentSilce(false);
+ dimColumnEvaluatorInfo.setDimensionExistsInCurrentSlice(false);
dimColEvaluatorInfoList.add(dimColumnEvaluatorInfo);
} else {
msrColumnEvalutorInfo = new MeasureColumnResolvedFilterInfo();
@@ -87,13 +87,13 @@ public class RowLevelFilterResolverImpl extends ConditionalFilterResolverImpl {
* and will be send to the spark for processing
*/
@Override
- public FilterExecuterType getFilterExecuterType() {
- return FilterExecuterType.ROWLEVEL;
+ public FilterExecutorType getFilterExecutorType() {
+ return FilterExecutorType.ROWLEVEL;
}
/**
* Method will the read filter expression corresponding to the resolver.
- * This method is required in row level executer inorder to evaluate the filter
+ * This method is required in row level executor in order to evaluate the filter
* expression against spark, as mentioned above row level is a special type
* filter resolver.
*
@@ -115,7 +115,7 @@ public class RowLevelFilterResolverImpl extends ConditionalFilterResolverImpl {
}
/**
- * Method will return the DimColumnResolvedFilterInfo instance which containts
+ * Method will return the DimColumnResolvedFilterInfo instance which contains
* measure level details.
*
* @return MeasureColumnResolvedFilterInfo
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
index 96ae473..16bd417 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/RowLevelRangeFilterResolverImpl.java
@@ -39,7 +39,7 @@ import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedExc
import org.apache.carbondata.core.scan.expression.logical.BinaryLogicalExpression;
import org.apache.carbondata.core.scan.filter.ColumnFilterInfo;
import org.apache.carbondata.core.scan.filter.FilterUtil;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo;
import org.apache.carbondata.core.util.ByteUtil;
@@ -143,18 +143,18 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
} catch (FilterIllegalMemberException e) {
// Any invalid member while evaluation shall be ignored, system will log the
// error only once since all rows the evaluation happens so inorder to avoid
- // too much log inforation only once the log will be printed.
+ // too much log information only once the log will be printed.
FilterUtil.logError(e, invalidRowsPresent);
}
}
- Comparator<byte[]> filterNoDictValueComaparator = new Comparator<byte[]>() {
+ Comparator<byte[]> filterNoDictValueComparator = new Comparator<byte[]>() {
@Override
public int compare(byte[] filterMember1, byte[] filterMember2) {
return ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterMember1, filterMember2);
}
};
- Collections.sort(filterValuesList, filterNoDictValueComaparator);
+ Collections.sort(filterValuesList, filterNoDictValueComparator);
return filterValuesList;
}
@@ -177,7 +177,7 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
} catch (FilterIllegalMemberException e) {
// Any invalid member while evaluation shall be ignored, system will log the
// error only once since all rows the evaluation happens so inorder to avoid
- // too much log inforation only once the log will be printed.
+ // too much log information only once the log will be printed.
FilterUtil.logError(e, invalidRowsPresent);
}
}
@@ -205,7 +205,7 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
dimColumnEvaluatorInfo.setColumnIndex(columnExpression.getCarbonColumn().getOrdinal());
dimColumnEvaluatorInfo.setRowIndex(index++);
dimColumnEvaluatorInfo.setDimension(columnExpression.getDimension());
- dimColumnEvaluatorInfo.setDimensionExistsInCurrentSilce(false);
+ dimColumnEvaluatorInfo.setDimensionExistsInCurrentSlice(false);
if (columnExpression.getDimension().getDataType() == DataTypes.DATE) {
if (!isIncludeFilter) {
filterInfo.setExcludeFilterList(getDirectSurrogateValues(columnExpression));
@@ -228,7 +228,7 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
msrColumnEvalutorInfo.setCarbonColumn(columnExpression.getCarbonColumn());
msrColumnEvalutorInfo.setColumnIndex(columnExpression.getCarbonColumn().getOrdinal());
msrColumnEvalutorInfo.setType(columnExpression.getCarbonColumn().getDataType());
- msrColumnEvalutorInfo.setMeasureExistsInCurrentSilce(false);
+ msrColumnEvalutorInfo.setMeasureExistsInCurrentSlice(false);
filterInfo
.setMeasuresFilterValuesList(getMeasureRangeValues(columnExpression.getMeasure()));
filterInfo.setIncludeFilter(isIncludeFilter);
@@ -298,12 +298,12 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
}
/**
- * Method will return the DimColumnResolvedFilterInfo instance which containts
+ * Method will return the DimColumnResolvedFilterInfo instance which contains
* measure level details.
*
* @return MeasureColumnResolvedFilterInfo
*/
- public List<MeasureColumnResolvedFilterInfo> getMsrColEvalutorInfoList() {
+ public List<MeasureColumnResolvedFilterInfo> getMsrColEvaluatorInfoList() {
return msrColEvalutorInfoList;
}
@@ -316,24 +316,24 @@ public class RowLevelRangeFilterResolverImpl extends ConditionalFilterResolverIm
}
/**
- * This method will provide the executer type to the callee inorder to identify
+ * This method will provide the executor type to the callee in order to identify
* the executer type for the filter resolution, Row level filter executer is a
* special executer since it get all the rows of the specified filter dimension
* and will be send to the spark for processing
*/
- public FilterExecuterType getFilterExecuterType() {
+ public FilterExecutorType getFilterExecutorType() {
switch (exp.getFilterExpressionType()) {
case GREATERTHAN:
- return FilterExecuterType.ROWLEVEL_GREATERTHAN;
+ return FilterExecutorType.ROWLEVEL_GREATERTHAN;
case GREATERTHAN_EQUALTO:
- return FilterExecuterType.ROWLEVEL_GREATERTHAN_EQUALTO;
+ return FilterExecutorType.ROWLEVEL_GREATERTHAN_EQUALTO;
case LESSTHAN:
- return FilterExecuterType.ROWLEVEL_LESSTHAN;
+ return FilterExecutorType.ROWLEVEL_LESSTHAN;
case LESSTHAN_EQUALTO:
- return FilterExecuterType.ROWLEVEL_LESSTHAN_EQUALTO;
+ return FilterExecutorType.ROWLEVEL_LESSTHAN_EQUALTO;
default:
- return FilterExecuterType.ROWLEVEL;
+ return FilterExecutorType.ROWLEVEL;
}
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
index fd019f6..2d53e5c 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
@@ -41,12 +41,12 @@ public class DimColumnResolvedFilterInfo extends ColumnResolvedFilterInfo implem
*/
private int rowIndex = -1;
- private boolean isDimensionExistsInCurrentSilce = true;
+ private boolean isDimensionExistsInCurrentSlice = true;
private CarbonDimension dimension;
/**
- * reolved filter object of a particlar filter Expression.
+ * resolved filter object of a particular filter Expression.
*/
private ColumnFilterInfo resolvedFilterValueObj;
@@ -58,13 +58,13 @@ public class DimColumnResolvedFilterInfo extends ColumnResolvedFilterInfo implem
public void addDimensionResolvedFilterInstance(CarbonDimension dimension,
ColumnFilterInfo filterResolvedObj) {
- List<ColumnFilterInfo> currentVals = dimensionResolvedFilter.get(dimension);
- if (null == currentVals) {
- currentVals = new ArrayList<ColumnFilterInfo>(20);
- currentVals.add(filterResolvedObj);
- dimensionResolvedFilter.put(dimension, currentVals);
+ List<ColumnFilterInfo> currentValues = dimensionResolvedFilter.get(dimension);
+ if (null == currentValues) {
+ currentValues = new ArrayList<ColumnFilterInfo>(20);
+ currentValues.add(filterResolvedObj);
+ dimensionResolvedFilter.put(dimension, currentValues);
} else {
- currentVals.add(filterResolvedObj);
+ currentValues.add(filterResolvedObj);
}
}
@@ -104,12 +104,12 @@ public class DimColumnResolvedFilterInfo extends ColumnResolvedFilterInfo implem
this.rowIndex = rowIndex;
}
- public boolean isDimensionExistsInCurrentSilce() {
- return isDimensionExistsInCurrentSilce;
+ public boolean isDimensionExistsInCurrentSlice() {
+ return isDimensionExistsInCurrentSlice;
}
- public void setDimensionExistsInCurrentSilce(boolean isDimensionExistsInCurrentSilce) {
- this.isDimensionExistsInCurrentSilce = isDimensionExistsInCurrentSilce;
+ public void setDimensionExistsInCurrentSlice(boolean isDimensionExistsInCurrentSilce) {
+ this.isDimensionExistsInCurrentSlice = isDimensionExistsInCurrentSilce;
}
public void populateFilterInfoBasedOnColumnType(ResolvedFilterInfoVisitorIntf visitor,
@@ -134,7 +134,7 @@ public class DimColumnResolvedFilterInfo extends ColumnResolvedFilterInfo implem
dimColumnResolvedFilterInfo.resolvedFilterValueObj = this.resolvedFilterValueObj;
dimColumnResolvedFilterInfo.rowIndex = this.rowIndex;
dimColumnResolvedFilterInfo.dimensionResolvedFilter = this.dimensionResolvedFilter;
- dimColumnResolvedFilterInfo.isDimensionExistsInCurrentSilce = isDimensionExistsInCurrentSilce;
+ dimColumnResolvedFilterInfo.isDimensionExistsInCurrentSlice = isDimensionExistsInCurrentSlice;
dimColumnResolvedFilterInfo.columnIndexInMinMaxByteArray = columnIndexInMinMaxByteArray;
return dimColumnResolvedFilterInfo;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/FalseConditionalResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/FalseConditionalResolverImpl.java
index 838e1ab..9d8e147 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/FalseConditionalResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/FalseConditionalResolverImpl.java
@@ -18,7 +18,7 @@
package org.apache.carbondata.core.scan.filter.resolver.resolverinfo;
import org.apache.carbondata.core.scan.expression.Expression;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.resolver.ConditionalFilterResolverImpl;
/* The expression with If FALSE will be resolved setting empty bitset. */
@@ -36,18 +36,18 @@ public class FalseConditionalResolverImpl extends ConditionalFilterResolverImpl
}
/**
- * This method will provide the executer type to the callee inorder to identify
- * the executer type for the filter resolution, False Expresssion willl not execute anything.
+ * This method will provide the executor type to the callee in order to identify
+ * the executor type for the filter resolution, False Expression will not execute anything.
* it will return empty bitset
*/
@Override
- public FilterExecuterType getFilterExecuterType() {
- return FilterExecuterType.FALSE;
+ public FilterExecutorType getFilterExecutorType() {
+ return FilterExecutorType.FALSE;
}
/**
* Method will the read filter expression corresponding to the resolver.
- * This method is required in row level executer inorder to evaluate the filter
+ * This method is required in row level executor in order to evaluate the filter
* expression against spark, as mentioned above row level is a special type
* filter resolver.
*
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java
index 9becac0..14d8193 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/MeasureColumnResolvedFilterInfo.java
@@ -41,14 +41,14 @@ public class MeasureColumnResolvedFilterInfo extends ColumnResolvedFilterInfo
private int rowIndex = -1;
- private boolean isMeasureExistsInCurrentSilce = true;
+ private boolean isMeasureExistsInCurrentSlice = true;
private CarbonColumn carbonColumn;
private CarbonMeasure carbonMeasure;
/**
- * reolved filter object of a particlar filter Expression.
+ * resolved filter object of a particular filter Expression.
*/
private ColumnFilterInfo resolvedFilterValueObj;
@@ -66,13 +66,13 @@ public class MeasureColumnResolvedFilterInfo extends ColumnResolvedFilterInfo
public void addMeasureResolvedFilterInstance(CarbonMeasure measures,
ColumnFilterInfo filterResolvedObj) {
- List<ColumnFilterInfo> currentVals = measureResolvedFilter.get(measures);
- if (null == currentVals) {
- currentVals = new ArrayList<ColumnFilterInfo>(20);
- currentVals.add(filterResolvedObj);
- measureResolvedFilter.put(measures, currentVals);
+ List<ColumnFilterInfo> currentValues = measureResolvedFilter.get(measures);
+ if (null == currentValues) {
+ currentValues = new ArrayList<ColumnFilterInfo>(20);
+ currentValues.add(filterResolvedObj);
+ measureResolvedFilter.put(measures, currentValues);
} else {
- currentVals.add(filterResolvedObj);
+ currentValues.add(filterResolvedObj);
}
}
@@ -121,8 +121,8 @@ public class MeasureColumnResolvedFilterInfo extends ColumnResolvedFilterInfo
throw new UnsupportedOperationException("Operation not supported");
}
- public void setMeasureExistsInCurrentSilce(boolean measureExistsInCurrentSilce) {
- isMeasureExistsInCurrentSilce = measureExistsInCurrentSilce;
+ public void setMeasureExistsInCurrentSlice(boolean measureExistsInCurrentSlice) {
+ isMeasureExistsInCurrentSlice = measureExistsInCurrentSlice;
}
public void setMeasure(CarbonMeasure carbonMeasure) {
@@ -151,7 +151,7 @@ public class MeasureColumnResolvedFilterInfo extends ColumnResolvedFilterInfo
msrColumnResolvedFilterInfo.resolvedFilterValueObj = this.resolvedFilterValueObj;
msrColumnResolvedFilterInfo.rowIndex = this.rowIndex;
msrColumnResolvedFilterInfo.measureResolvedFilter = this.measureResolvedFilter;
- msrColumnResolvedFilterInfo.setMeasureExistsInCurrentSilce(this.isMeasureExistsInCurrentSilce);
+ msrColumnResolvedFilterInfo.setMeasureExistsInCurrentSlice(this.isMeasureExistsInCurrentSlice);
msrColumnResolvedFilterInfo.columnIndexInMinMaxByteArray = columnIndexInMinMaxByteArray;
return msrColumnResolvedFilterInfo;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/TrueConditionalResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/TrueConditionalResolverImpl.java
index d449be2..22add32 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/TrueConditionalResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/TrueConditionalResolverImpl.java
@@ -18,7 +18,7 @@
package org.apache.carbondata.core.scan.filter.resolver.resolverinfo;
import org.apache.carbondata.core.scan.expression.Expression;
-import org.apache.carbondata.core.scan.filter.intf.FilterExecuterType;
+import org.apache.carbondata.core.scan.filter.intf.FilterExecutorType;
import org.apache.carbondata.core.scan.filter.resolver.ConditionalFilterResolverImpl;
/* The expression with If TRUE will be resolved setting all bits to TRUE. */
@@ -36,13 +36,13 @@ public class TrueConditionalResolverImpl extends ConditionalFilterResolverImpl {
}
/**
- * This method will provide the executer type to the callee inorder to identify
+ * This method will provide the executor type to the callee in order to identify
* the executer type for the filter resolution, Row level filter executer is a
* special executer since it get all the rows of the specified filter dimension
* and will be send to the spark for processing
*/
@Override
- public FilterExecuterType getFilterExecuterType() {
- return FilterExecuterType.TRUE;
+ public FilterExecutorType getFilterExecutorType() {
+ return FilterExecutorType.TRUE;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
index 6d7002c..e81007d 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
@@ -39,7 +39,7 @@ public class CustomTypeDictionaryVisitor implements ResolvedFilterInfoVisitorInt
/**
* This Visitor method is been used to resolve or populate the filter details
- * by using custom type dictionary value, the filter membrers will be resolved using
+ * by using custom type dictionary value, the filter members will be resolved using
* custom type function which will generate dictionary for the direct column type filter members
*
* @param visitableObj
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/ResolvedFilterInfoVisitorIntf.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/ResolvedFilterInfoVisitorIntf.java
index eb58ece..ae89536 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/ResolvedFilterInfoVisitorIntf.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/resolverinfo/visitor/ResolvedFilterInfoVisitorIntf.java
@@ -28,7 +28,7 @@ public interface ResolvedFilterInfoVisitorIntf {
* dimColResolvedFilterInfo visitable object with filter member values based
* on the visitor type, currently there 3 types of visitors custom,direct
* and no dictionary, all types of visitor populate the visitable instance
- * as per its buisness logic which is different for all the visitors.
+ * as per its business logic which is different for all the visitors.
*
* @param visitableObj
* @param metadata
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
index faec145..eec5f8d 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
@@ -204,10 +204,10 @@ public class QueryModel {
col.setDimension(dim);
col.setDimension(true);
} else {
- // in case of sdk or fileformat, there can be chance that each carbondata file may have
+ // in case of sdk or file format, there can be chance that each carbondata file may have
// different schema, so every segment properties will have dims and measures based on
// corresponding segment. So the filter column may not be present in it. so generate the
- // dimension and measure from the carbontable
+ // dimension and measure from the carbon table
CarbonDimension dimension =
table.getDimensionByName(col.getColumnName());
CarbonMeasure measure = table.getMeasureByName(col.getColumnName());
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java
index 6e3435c..adbba56 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModelBuilder.java
@@ -127,19 +127,19 @@ public class QueryModelBuilder {
Map<Integer, List<Integer>> complexColumnMap = new HashMap<>();
List<ProjectionDimension> carbonDimensions = projection.getDimensions();
// Traverse and find out if the top most parent of projection column is already there
- List<CarbonDimension> projectionDimenesionToBeMerged = new ArrayList<>();
+ List<CarbonDimension> projectionDimensionToBeMerged = new ArrayList<>();
for (ProjectionDimension projectionDimension : carbonDimensions) {
CarbonDimension complexParentDimension =
projectionDimension.getDimension().getComplexParentDimension();
if (null != complexParentDimension && isAlreadyExists(complexParentDimension,
carbonDimensions)) {
- projectionDimenesionToBeMerged.add(projectionDimension.getDimension());
+ projectionDimensionToBeMerged.add(projectionDimension.getDimension());
}
}
- if (projectionDimenesionToBeMerged.size() != 0) {
+ if (projectionDimensionToBeMerged.size() != 0) {
projection =
- removeMergedDimensions(projectionDimenesionToBeMerged, projectionColumns, factTableName);
+ removeMergedDimensions(projectionDimensionToBeMerged, projectionColumns, factTableName);
carbonDimensions = projection.getDimensions();
}
@@ -220,7 +220,7 @@ public class QueryModelBuilder {
for (int j = i; j < childOrdinals.size(); j++) {
CarbonDimension parentDimension = getDimensionBasedOnOrdinal(dimList, childOrdinals.get(i));
CarbonDimension childDimension = getDimensionBasedOnOrdinal(dimList, childOrdinals.get(j));
- if (!mergedChild.contains(childOrdinals.get(j)) && checkChildsInSamePath(parentDimension,
+ if (!mergedChild.contains(childOrdinals.get(j)) && checkChildrenInSamePath(parentDimension,
childDimension)) {
mergedChild.add(childDimension);
}
@@ -229,7 +229,7 @@ public class QueryModelBuilder {
return mergedChild;
}
- private boolean checkChildsInSamePath(CarbonDimension parentDimension,
+ private boolean checkChildrenInSamePath(CarbonDimension parentDimension,
CarbonDimension childDimension) {
if (parentDimension.getColName().equals(childDimension.getColName())) {
return false;
@@ -327,7 +327,7 @@ public class QueryModelBuilder {
boolean[] isFilterMeasures = new boolean[table.getAllMeasures().size()];
queryModel.setIsFilterDimensions(isFilterDimensions);
queryModel.setIsFilterMeasures(isFilterMeasures);
- // In case of Dictionary Include Range Column we donot optimize the range expression
+ // In case of Dictionary Include Range Column we do not optimize the range expression
if (indexFilter != null) {
if (isConvertToRangeFilter()) {
indexFilter.processFilterExpression(isFilterDimensions, isFilterMeasures);
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/processor/BlockletIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/processor/BlockletIterator.java
index f0d81ff..f455321 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/processor/BlockletIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/processor/BlockletIterator.java
@@ -27,7 +27,7 @@ class BlockletIterator extends CarbonIterator<DataRefNode> {
/**
* data store block
*/
- protected DataRefNode datablock;
+ protected DataRefNode dataBlock;
/**
* block counter to keep a track how many block has been processed
*/
@@ -39,18 +39,18 @@ class BlockletIterator extends CarbonIterator<DataRefNode> {
private boolean hasNext = true;
/**
- * total number blocks assgned to this iterator
+ * total number blocks assigned to this iterator
*/
private long totalNumberOfBlocksToScan;
/**
* Constructor
*
- * @param datablock first data block
+ * @param dataBlock first data block
* @param totalNumberOfBlockletToScan total number of blocklets to be scanned
*/
- BlockletIterator(DataRefNode datablock, long totalNumberOfBlockletToScan) {
- this.datablock = datablock;
+ BlockletIterator(DataRefNode dataBlock, long totalNumberOfBlockletToScan) {
+ this.dataBlock = dataBlock;
this.totalNumberOfBlocksToScan = totalNumberOfBlockletToScan;
}
@@ -71,18 +71,18 @@ class BlockletIterator extends CarbonIterator<DataRefNode> {
@Override
public DataRefNode next() {
// get the current blocks
- DataRefNode datablockTemp = datablock;
+ DataRefNode dataBlockTemp = dataBlock;
// store the next data block
- datablock = datablock.getNextDataRefNode();
+ dataBlock = dataBlock.getNextDataRefNode();
// increment the counter
blockCounter++;
// if all the data block is processed then
// set the has next flag to false
// or if number of blocks assigned to this iterator is processed
- // then also set the hasnext flag to false
- if (null == datablock || blockCounter >= this.totalNumberOfBlocksToScan) {
+ // then also set the hasNext flag to false
+ if (null == dataBlock || blockCounter >= this.totalNumberOfBlocksToScan) {
hasNext = false;
}
- return datablockTemp;
+ return dataBlockTemp;
}
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/processor/DataBlockIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/processor/DataBlockIterator.java
index 4420dff..f570c65 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/processor/DataBlockIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/processor/DataBlockIterator.java
@@ -90,7 +90,7 @@ public class DataBlockIterator extends CarbonIterator<List<Object[]>> {
this.fileReader = fileReader;
blockletIterator = new BlockletIterator(blockExecutionInfo.getFirstDataBlock(),
blockExecutionInfo.getNumberOfBlockToScan());
- if (blockExecutionInfo.getFilterExecuterTree() != null) {
+ if (blockExecutionInfo.getFilterExecutorTree() != null) {
blockletScanner = new BlockletFilterScanner(blockExecutionInfo, queryStatisticsModel);
} else {
blockletScanner = new BlockletFullScanner(blockExecutionInfo, queryStatisticsModel);
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java b/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java
index a3e921c..95e6327 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/BlockletScannedResult.java
@@ -129,7 +129,7 @@ public abstract class BlockletScannedResult {
private int[] complexParentBlockIndexes;
/**
- * blockletid+pageumber to deleted reocrd map
+ * blockletId+pageNumber to deleted record map
*/
private Map<String, DeleteDeltaVo> deletedRecordMap;
@@ -160,7 +160,7 @@ public abstract class BlockletScannedResult {
this.fixedLengthKeySize = blockExecutionInfo.getFixedLengthKeySize();
this.noDictionaryColumnChunkIndexes = blockExecutionInfo.getNoDictionaryColumnChunkIndexes();
this.dictionaryColumnChunkIndexes = blockExecutionInfo.getDictionaryColumnChunkIndex();
- this.complexParentIndexToQueryMap = blockExecutionInfo.getComlexDimensionInfoMap();
+ this.complexParentIndexToQueryMap = blockExecutionInfo.getComplexDimensionInfoMap();
this.complexParentBlockIndexes = blockExecutionInfo.getComplexColumnParentBlockIndexes();
this.totalDimensionsSize = blockExecutionInfo.getProjectionDimensions().length;
this.deletedRecordMap = blockExecutionInfo.getDeletedRecordsMap();
@@ -263,7 +263,7 @@ public abstract class BlockletScannedResult {
public void fillColumnarComplexBatch(ColumnVectorInfo[] vectorInfos) {
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
- ReUsableByteArrayDataOutputStream reuseableDataOutput =
+ ReUsableByteArrayDataOutputStream reusableDataOutput =
new ReUsableByteArrayDataOutputStream(byteStream);
boolean isExceptionThrown = false;
for (int i = 0; i < vectorInfos.length; i++) {
@@ -276,23 +276,23 @@ public abstract class BlockletScannedResult {
vectorInfos[i].genericQueryType
.parseBlocksAndReturnComplexColumnByteArray(dimRawColumnChunks, dimensionColumnPages,
pageFilteredRowId == null ? j : pageFilteredRowId[pageCounter][j], pageCounter,
- reuseableDataOutput);
+ reusableDataOutput);
Object data = vectorInfos[i].genericQueryType
- .getDataBasedOnDataType(ByteBuffer.wrap(reuseableDataOutput.getByteArray()));
+ .getDataBasedOnDataType(ByteBuffer.wrap(reusableDataOutput.getByteArray()));
vector.putObject(vectorOffset++, data);
- reuseableDataOutput.reset();
+ reusableDataOutput.reset();
} catch (IOException e) {
isExceptionThrown = true;
LOGGER.error(e.getMessage(), e);
} finally {
if (isExceptionThrown) {
- CarbonUtil.closeStreams(reuseableDataOutput);
+ CarbonUtil.closeStreams(reusableDataOutput);
CarbonUtil.closeStreams(byteStream);
}
}
}
}
- CarbonUtil.closeStreams(reuseableDataOutput);
+ CarbonUtil.closeStreams(reusableDataOutput);
CarbonUtil.closeStreams(byteStream);
}
@@ -323,7 +323,7 @@ public abstract class BlockletScannedResult {
}
/**
- * Just increment the counter incase of query only on measures.
+ * Just increment the counter in case of query only on measures.
*/
public void incrementCounter() {
rowCounter++;
@@ -442,7 +442,7 @@ public abstract class BlockletScannedResult {
clearValidRowIdList();
}
- public int numberOfpages() {
+ public int numberOfPages() {
return pageFilteredRowCount.length;
}
@@ -521,7 +521,7 @@ public abstract class BlockletScannedResult {
protected List<byte[][]> getComplexTypeKeyArrayBatch() {
List<byte[][]> complexTypeArrayList = new ArrayList<>(validRowIds.size());
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
- ReUsableByteArrayDataOutputStream reUseableDataOutput =
+ ReUsableByteArrayDataOutputStream reusableDataOutput =
new ReUsableByteArrayDataOutputStream(byteStream);
boolean isExceptionThrown = false;
byte[][] complexTypeData = null;
@@ -538,23 +538,23 @@ public abstract class BlockletScannedResult {
try {
genericQueryType
.parseBlocksAndReturnComplexColumnByteArray(dimRawColumnChunks, dimensionColumnPages,
- validRowIds.get(j), pageCounter, reUseableDataOutput);
+ validRowIds.get(j), pageCounter, reusableDataOutput);
// get the key array in columnar way
byte[][] complexKeyArray = complexTypeArrayList.get(j);
complexKeyArray[i] = byteStream.toByteArray();
- reUseableDataOutput.reset();
+ reusableDataOutput.reset();
} catch (IOException e) {
isExceptionThrown = true;
LOGGER.error(e.getMessage(), e);
} finally {
if (isExceptionThrown) {
- CarbonUtil.closeStreams(reUseableDataOutput);
+ CarbonUtil.closeStreams(reusableDataOutput);
CarbonUtil.closeStreams(byteStream);
}
}
}
}
- CarbonUtil.closeStreams(reUseableDataOutput);
+ CarbonUtil.closeStreams(reusableDataOutput);
CarbonUtil.closeStreams(byteStream);
return complexTypeArrayList;
}
@@ -573,7 +573,7 @@ public abstract class BlockletScannedResult {
public void setBlockletId(String blockletId, String blockletNumber) {
this.blockletId = blockletId + CarbonCommonConstants.FILE_SEPARATOR + blockletNumber;
this.blockletNumber = blockletNumber;
- // if deleted recors map is present for this block
+ // if deleted record map is present for this block
// then get the first page deleted vo
if (null != deletedRecordMap) {
String key;
@@ -727,7 +727,7 @@ public abstract class BlockletScannedResult {
public abstract int getCurrentRowId();
/**
- * @return dictionary key array for all the dictionary dimension in integer array forat
+ * @return dictionary key array for all the dictionary dimension in integer array format
* selected in query
*/
public abstract int[] getDictionaryKeyIntegerArray();
@@ -750,7 +750,7 @@ public abstract class BlockletScannedResult {
completeKey = new byte[fixedLengthKeySize];
dictionaryKeyArrayList.add(completeKey);
}
- // initialize offset array onli if data is present
+ // initialize offset array if data is present
if (this.dictionaryColumnChunkIndexes.length > 0) {
columnDataOffsets = new int[validRowIds.size()];
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
index 25ba9a0..c186701 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
@@ -97,7 +97,7 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
this.fileReader.setReadPageByPage(queryModel.isReadPageByPage());
this.execService = execService;
initialiseInfos();
- initQueryStatiticsModel();
+ initQueryStatisticsModel();
}
private void initialiseInfos() {
@@ -125,11 +125,11 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
*
* @param dataBlock data block
* @param deleteDeltaInfo delete delta info
- * @return blockid+pageid to deleted row mapping
+ * @return blockId+pageId to deleted row mapping
*/
private Map<String, DeleteDeltaVo> getDeleteDeltaDetails(AbstractIndex dataBlock,
DeleteDeltaInfo deleteDeltaInfo) {
- // if datablock deleted delta timestamp is more then the current delete delta files timestamp
+ // if data block deleted delta timestamp is more than the current delete delta files timestamp
// then return the current deleted rows
if (dataBlock.getDeleteDeltaTimestamp() >= deleteDeltaInfo
.getLatestDeleteDeltaFileTimestamp()) {
@@ -232,7 +232,7 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
return null;
}
- private void initQueryStatiticsModel() {
+ private void initQueryStatisticsModel() {
this.queryStatisticsModel = new QueryStatisticsModel();
this.queryStatisticsModel.setRecorder(recorder);
QueryStatistic queryStatisticTotalBlocklet = new QueryStatistic();
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/PartitionSpliterRawResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/PartitionSplitterRawResultIterator.java
similarity index 91%
rename from core/src/main/java/org/apache/carbondata/core/scan/result/iterator/PartitionSpliterRawResultIterator.java
rename to core/src/main/java/org/apache/carbondata/core/scan/result/iterator/PartitionSplitterRawResultIterator.java
index 790657a..e9b0d11 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/PartitionSpliterRawResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/PartitionSplitterRawResultIterator.java
@@ -20,13 +20,13 @@ package org.apache.carbondata.core.scan.result.iterator;
import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.core.scan.result.RowBatch;
-public class PartitionSpliterRawResultIterator extends CarbonIterator<Object[]> {
+public class PartitionSplitterRawResultIterator extends CarbonIterator<Object[]> {
private CarbonIterator<RowBatch> iterator;
private RowBatch batch;
private int counter;
- public PartitionSpliterRawResultIterator(CarbonIterator<RowBatch> iterator) {
+ public PartitionSplitterRawResultIterator(CarbonIterator<RowBatch> iterator) {
this.iterator = iterator;
}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
index 704df8f..21b4ae9 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
@@ -137,7 +137,7 @@ public class RawResultIterator extends CarbonIterator<Object[]> {
if (!isBackupFilled) {
fetchFuture.get();
}
- // copy backup buffer to current buffer and fill backup buffer asyn
+ // copy backup buffer to current buffer and fill backup buffer asynchronously
currentIdxInBuffer = 0;
currentBuffer.clear();
currentBuffer = backupBuffer;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/CarbonColumnarBatch.java b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/CarbonColumnarBatch.java
index 471f9b2..634ca7a 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/CarbonColumnarBatch.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/CarbonColumnarBatch.java
@@ -73,7 +73,7 @@ public class CarbonColumnarBatch {
}
/**
- * Mark the rows as filterd first before filling the batch, so that these rows will not be added
+ * Mark the rows as filtered first before filling the batch, so that these rows will not be added
* to vector batches.
* @param rowId
*/
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/AbstractCarbonColumnarVector.java b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/AbstractCarbonColumnarVector.java
index aeeba53..4a148c1 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/AbstractCarbonColumnarVector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/AbstractCarbonColumnarVector.java
@@ -25,7 +25,7 @@ import org.apache.carbondata.core.scan.result.vector.CarbonDictionary;
import org.apache.carbondata.core.scan.scanner.LazyPageLoader;
public abstract class AbstractCarbonColumnarVector
- implements CarbonColumnVector, ConvertableVector {
+ implements CarbonColumnVector, ConvertibleVector {
protected CarbonColumnVector columnVector;
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectFactory.java b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectFactory.java
index f6d2941..4c7bb07 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectFactory.java
@@ -34,7 +34,7 @@ public final class ColumnarVectorWrapperDirectFactory {
* @param invertedIndex Inverted index of column page
* @param nullBitset row locations of nulls in bitset
* @param deletedRows deleted rows locations in bitset.
- * @param isnullBitsExists whether nullbitset present on this page, usually for dimension columns
+ * @param isnullBitsExists whether nullBitset is present on this page, usually for dimension columns
* there is no null bitset.
* @return wrapped CarbonColumnVector
*/
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDelta.java b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDelta.java
index e3a488c..e7aaac9 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDelta.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDelta.java
@@ -23,7 +23,7 @@ import java.util.BitSet;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
/**
- * Column vector for column pages which has delete delta, so it uses delta biset to filter out
+ * Column vector for column pages which has delete delta, so it uses delta bitset to filter out
* data before filling to actual vector.
*/
class ColumnarVectorWrapperDirectWithDeleteDelta extends AbstractCarbonColumnarVector {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDeltaAndInvertedIndex.java b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDeltaAndInvertedIndex.java
index e322954..6a188b4 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDeltaAndInvertedIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ColumnarVectorWrapperDirectWithDeleteDeltaAndInvertedIndex.java
@@ -27,8 +27,8 @@ import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl;
/**
- * Column vector for column pages which has delete delta and inverted index, so it uses delta biset
- * to filter out data and use inverted index before filling to actual vector
+ * Column vector for column pages which has delete delta and inverted index, so it uses delta
+ * bitset to filter out data and use inverted index before filling to actual vector
*/
public class ColumnarVectorWrapperDirectWithDeleteDeltaAndInvertedIndex
extends ColumnarVectorWrapperDirectWithInvertedIndex {
@@ -48,7 +48,7 @@ public class ColumnarVectorWrapperDirectWithDeleteDeltaAndInvertedIndex
* @param invertedIndex Inverted index of the column
* @param nullBits Null row ordinals in the bitset
* @param isnullBitsExists whether to consider inverted index while setting null bitset or not.
- * we are having nullbitset even for dimensions also.
+ * we are having nullBitset even for dimensions also.
* But some dimension columns still don't have nullbitset.
* So if null bitset does not exist then
* it should not inverted index while setting the null
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ConvertableVector.java b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ConvertibleVector.java
similarity index 96%
rename from core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ConvertableVector.java
rename to core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ConvertibleVector.java
index 7020c66..6b1102f 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ConvertableVector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/ConvertibleVector.java
@@ -21,7 +21,7 @@ package org.apache.carbondata.core.scan.result.vector.impl.directread;
* This interface provides method to convert the values by using inverted index and delete delta
* and fill to the underlying vector.
*/
-public interface ConvertableVector {
+public interface ConvertibleVector {
/**
* Convert the values and fill it to the underlying vector.
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/SequentialFill.java b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/SequentialFill.java
index a0df68c..8d8dba6 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/SequentialFill.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/SequentialFill.java
@@ -24,7 +24,7 @@ import org.apache.carbondata.common.annotations.InterfaceStability;
/**
* It is sort of a marker interface to let execution engine know that it is appendable/sequential
- * data adding vector. It means we cannot add random rowids to it.
+ * data adding vector. It means we cannot add random row ids to it.
*/
@InterfaceStability.Evolving
@InterfaceAudience.Internal
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
index 0c3b847..573e8b9 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
@@ -29,7 +29,7 @@ import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
import org.apache.carbondata.core.datastore.page.ColumnPage;
import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.executer.ImplicitColumnFilterExecutor;
import org.apache.carbondata.core.scan.processor.RawBlockletColumnChunks;
import org.apache.carbondata.core.scan.result.BlockletScannedResult;
@@ -50,9 +50,9 @@ import org.apache.carbondata.core.util.CarbonUtil;
public class BlockletFilterScanner extends BlockletFullScanner {
/**
- * filter executer to evaluate filter condition
+ * filter executor to evaluate filter condition
*/
- private FilterExecuter filterExecuter;
+ private FilterExecutor filterExecutor;
/**
* this will be used to apply min max
* this will be useful for dimension column which is on the right side
@@ -79,7 +79,7 @@ public class BlockletFilterScanner extends BlockletFullScanner {
isMinMaxEnabled = Boolean.parseBoolean(minMaxEnableValue);
}
// get the filter tree
- this.filterExecuter = blockExecutionInfo.getFilterExecuterTree();
+ this.filterExecutor = blockExecutionInfo.getFilterExecutorTree();
this.queryStatisticsModel = queryStatisticsModel;
String useBitSetPipeLine = CarbonProperties.getInstance()
@@ -125,15 +125,15 @@ public class BlockletFilterScanner extends BlockletFullScanner {
}
BitSet bitSet = null;
// check for implicit include filter instance
- if (filterExecuter instanceof ImplicitColumnFilterExecutor) {
+ if (filterExecutor instanceof ImplicitColumnFilterExecutor) {
String blockletId = blockExecutionInfo.getBlockIdString() +
CarbonCommonConstants.FILE_SEPARATOR + dataBlock.blockletIndex();
- bitSet = ((ImplicitColumnFilterExecutor) filterExecuter)
+ bitSet = ((ImplicitColumnFilterExecutor) filterExecutor)
.isFilterValuesPresentInBlockOrBlocklet(
dataBlock.getColumnsMaxValue(),
dataBlock.getColumnsMinValue(), blockletId, dataBlock.minMaxFlagArray());
} else {
- bitSet = this.filterExecuter
+ bitSet = this.filterExecutor
.isScanRequired(dataBlock.getColumnsMaxValue(),
dataBlock.getColumnsMinValue(), dataBlock.minMaxFlagArray());
}
@@ -145,7 +145,7 @@ public class BlockletFilterScanner extends BlockletFullScanner {
@Override
public void readBlocklet(RawBlockletColumnChunks rawBlockletColumnChunks) throws IOException {
long startTime = System.currentTimeMillis();
- this.filterExecuter.readColumnChunks(rawBlockletColumnChunks);
+ this.filterExecutor.readColumnChunks(rawBlockletColumnChunks);
// adding statistics for carbon read time
QueryStatistic readTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
.get(QueryStatisticsConstants.READ_BLOCKlET_TIME);
@@ -172,11 +172,11 @@ public class BlockletFilterScanner extends BlockletFullScanner {
private BlockletScannedResult executeFilter(RawBlockletColumnChunks rawBlockletColumnChunks)
throws FilterUnsupportedException, IOException {
long startTime = System.currentTimeMillis();
- // set the indexed data if it has any during fgindex pruning.
+ // set the indexed data if it has any during fgIndex pruning.
BitSetGroup fgBitSetGroup = rawBlockletColumnChunks.getDataBlock().getIndexedData();
rawBlockletColumnChunks.setBitSetGroup(fgBitSetGroup);
// apply filter on actual data, for each page
- BitSetGroup bitSetGroup = this.filterExecuter.applyFilter(rawBlockletColumnChunks,
+ BitSetGroup bitSetGroup = this.filterExecutor.applyFilter(rawBlockletColumnChunks,
useBitSetPipeLine);
// if filter result is empty then return with empty result
if (bitSetGroup.isEmpty()) {
@@ -362,7 +362,7 @@ public class BlockletFilterScanner extends BlockletFullScanner {
throws FilterUnsupportedException, IOException {
long startTime = System.currentTimeMillis();
// apply filter on actual data, for each page
- BitSet pages = this.filterExecuter.prunePages(rawBlockletColumnChunks);
+ BitSet pages = this.filterExecutor.prunePages(rawBlockletColumnChunks);
// if filter result is empty then return with empty result
if (pages.isEmpty()) {
CarbonUtil.freeMemory(rawBlockletColumnChunks.getDimensionRawColumnChunks(),
diff --git a/core/src/main/java/org/apache/carbondata/core/stats/QueryStatistic.java b/core/src/main/java/org/apache/carbondata/core/stats/QueryStatistic.java
index b21bb94..d1fe3c3 100644
--- a/core/src/main/java/org/apache/carbondata/core/stats/QueryStatistic.java
+++ b/core/src/main/java/org/apache/carbondata/core/stats/QueryStatistic.java
@@ -42,7 +42,7 @@ public class QueryStatistic implements Serializable {
private long timeTaken;
/**
- * starttime of the phase
+ * start time of the phase
*/
private long startTime;
@@ -71,10 +71,10 @@ public class QueryStatistic implements Serializable {
* For example total time taken for scan or result preparation
*
* @param message statistic message
- * @param timetaken
+ * @param timeTaken
*/
- public void addFixedTimeStatistic(String message, long timetaken) {
- this.timeTaken = timetaken;
+ public void addFixedTimeStatistic(String message, long timeTaken) {
+ this.timeTaken = timeTaken;
this.message = message;
}
@@ -95,7 +95,7 @@ public class QueryStatistic implements Serializable {
if (StringUtils.isEmpty(queryWithTaskId)) {
return message + timeTaken;
}
- return message + " for the taskid : " + queryWithTaskId + " Is : " + timeTaken;
+ return message + " for the task id : " + queryWithTaskId + " Is : " + timeTaken;
}
public String getMessage() {
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
index 7fb2cbd..b97109b 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
@@ -86,7 +86,7 @@ public class LoadMetadataDetails implements Serializable {
/**
* Segment modification or deletion time stamp
*/
- private String modificationOrdeletionTimesStamp;
+ private String modificationOrDeletionTimestamp;
private String loadStartTime;
@@ -167,21 +167,21 @@ public class LoadMetadataDetails implements Serializable {
}
/**
- * @return the modificationOrdeletionTimesStamp
+ * @return the modificationOrDeletionTimestamp
*/
- public long getModificationOrdeletionTimesStamp() {
- if (null == modificationOrdeletionTimesStamp) {
+ public long getModificationOrDeletionTimestamp() {
+ if (null == modificationOrDeletionTimestamp) {
return 0;
}
- return convertTimeStampToLong(modificationOrdeletionTimesStamp);
+ return convertTimeStampToLong(modificationOrDeletionTimestamp);
}
/**
- * @param modificationOrdeletionTimesStamp the modificationOrdeletionTimesStamp to set
+ * @param modificationOrDeletionTimestamp the modificationOrDeletionTimestamp to set
*/
- public void setModificationOrdeletionTimesStamp(long modificationOrdeletionTimesStamp) {
- this.modificationOrdeletionTimesStamp =
- Long.toString(modificationOrdeletionTimesStamp);
+ public void setModificationOrDeletionTimestamp(long modificationOrDeletionTimestamp) {
+ this.modificationOrDeletionTimestamp =
+ Long.toString(modificationOrDeletionTimestamp);
}
/* (non-Javadoc)
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
index 16b2a4e..2d2060e 100755
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
@@ -96,7 +96,7 @@ public class SegmentStatusManager {
}
/**
- * This will return the lock object used to lock the table status file before updation.
+ * This will return the lock object used to lock the table status file before update.
*
* @return
*/
@@ -347,7 +347,7 @@ public class SegmentStatusManager {
throw ex;
}
try {
- LOG.warn("Failed to read table status file, retrycount:" + retry);
+ LOG.warn("Failed to read table status file, retry count:" + retry);
// sleep for some time before retry
TimeUnit.SECONDS.sleep(READ_TABLE_STATUS_RETRY_TIMEOUT);
} catch (InterruptedException e) {
@@ -696,7 +696,7 @@ public class SegmentStatusManager {
} else if (SegmentStatus.MARKED_FOR_DELETE != segmentStatus) {
loadFound = true;
loadMetadata.setSegmentStatus(SegmentStatus.MARKED_FOR_DELETE);
- loadMetadata.setModificationOrdeletionTimesStamp(CarbonUpdateUtil.readCurrentTime());
+ loadMetadata.setModificationOrDeletionTimestamp(CarbonUpdateUtil.readCurrentTime());
LOG.info("Segment ID " + loadId + " Marked for Delete");
}
break;
@@ -805,7 +805,7 @@ public class SegmentStatusManager {
}
/**
- * updates segment status and modificaton time details
+ * updates segment status and modification time details
*
* @param loadMetadata
*/
@@ -813,7 +813,7 @@ public class SegmentStatusManager {
// update status only if the segment is not marked for delete
if (SegmentStatus.MARKED_FOR_DELETE != loadMetadata.getSegmentStatus()) {
loadMetadata.setSegmentStatus(SegmentStatus.MARKED_FOR_DELETE);
- loadMetadata.setModificationOrdeletionTimesStamp(CarbonUpdateUtil.readCurrentTime());
+ loadMetadata.setModificationOrDeletionTimestamp(CarbonUpdateUtil.readCurrentTime());
}
}
@@ -883,13 +883,13 @@ public class SegmentStatusManager {
LoadMetadataDetails[] listOfLoadFolderDetailsArray =
SegmentStatusManager.readLoadMetadata(metaPath);
if (listOfLoadFolderDetailsArray.length != 0) {
- for (LoadMetadataDetails loaddetail :listOfLoadFolderDetailsArray) {
- SegmentStatus segmentStatus = loaddetail.getSegmentStatus();
+ for (LoadMetadataDetails loadDetail :listOfLoadFolderDetailsArray) {
+ SegmentStatus segmentStatus = loadDetail.getSegmentStatus();
if (segmentStatus == SegmentStatus.INSERT_IN_PROGRESS
|| segmentStatus == SegmentStatus.INSERT_OVERWRITE_IN_PROGRESS) {
loadInProgress =
isLoadInProgress(carbonTable.getAbsoluteTableIdentifier(),
- loaddetail.getLoadName());
+ loadDetail.getLoadName());
}
}
}
@@ -928,12 +928,12 @@ public class SegmentStatusManager {
LoadMetadataDetails[] listOfLoadFolderDetailsArray =
SegmentStatusManager.readLoadMetadata(metaPath);
if (listOfLoadFolderDetailsArray.length != 0) {
- for (LoadMetadataDetails loaddetail :listOfLoadFolderDetailsArray) {
- SegmentStatus segmentStatus = loaddetail.getSegmentStatus();
+ for (LoadMetadataDetails loadDetail :listOfLoadFolderDetailsArray) {
+ SegmentStatus segmentStatus = loadDetail.getSegmentStatus();
if (segmentStatus == SegmentStatus.INSERT_OVERWRITE_IN_PROGRESS) {
loadInProgress =
isLoadInProgress(carbonTable.getAbsoluteTableIdentifier(),
- loaddetail.getLoadName());
+ loadDetail.getLoadName());
}
}
}
@@ -1037,13 +1037,13 @@ public class SegmentStatusManager {
}
}
- private static ReturnTuple isUpdationRequired(boolean isForceDeletion, CarbonTable carbonTable,
+ private static ReturnTuple isUpdateRequired(boolean isForceDeletion, CarbonTable carbonTable,
AbsoluteTableIdentifier absoluteTableIdentifier, LoadMetadataDetails[] details) {
// Delete marked loads
- boolean isUpdationRequired = DeleteLoadFolders
+ boolean isUpdateRequired = DeleteLoadFolders
.deleteLoadFoldersFromFileSystem(absoluteTableIdentifier, isForceDeletion, details,
carbonTable.getMetadataPath());
- return new ReturnTuple(details, isUpdationRequired);
+ return new ReturnTuple(details, isUpdateRequired);
}
public static void deleteLoadsAndUpdateMetadata(CarbonTable carbonTable, boolean isForceDeletion,
@@ -1054,24 +1054,24 @@ public class SegmentStatusManager {
CarbonLockUtil.deleteExpiredSegmentLockFiles(carbonTable);
if (isLoadDeletionRequired(metadataDetails)) {
AbsoluteTableIdentifier identifier = carbonTable.getAbsoluteTableIdentifier();
- boolean updationCompletionStatus = false;
+ boolean updateCompletionStatus = false;
LoadMetadataDetails[] newAddedLoadHistoryList = null;
ReturnTuple tuple =
- isUpdationRequired(isForceDeletion, carbonTable, identifier, metadataDetails);
+ isUpdateRequired(isForceDeletion, carbonTable, identifier, metadataDetails);
if (tuple.isUpdateRequired) {
ICarbonLock carbonTableStatusLock =
CarbonLockFactory.getCarbonLockObj(identifier, LockUsage.TABLE_STATUS_LOCK);
boolean locked = false;
try {
- // Update load metadate file after cleaning deleted nodes
+ // Update load metadata file after cleaning deleted nodes
locked = carbonTableStatusLock.lockWithRetries();
if (locked) {
LOG.info("Table status lock has been successfully acquired.");
- // Again read status and check to verify updation required or not.
+ // Again read status and check to verify update required or not.
LoadMetadataDetails[] details =
SegmentStatusManager.readLoadMetadata(carbonTable.getMetadataPath());
ReturnTuple tuple2 =
- isUpdationRequired(isForceDeletion, carbonTable, identifier, details);
+ isUpdateRequired(isForceDeletion, carbonTable, identifier, details);
if (!tuple2.isUpdateRequired) {
return;
}
@@ -1110,7 +1110,7 @@ public class SegmentStatusManager {
CarbonTablePath.getTableStatusFilePath(identifier.getTablePath()),
latestStatus.toArray(new LoadMetadataDetails[0]));
}
- updationCompletionStatus = true;
+ updateCompletionStatus = true;
} else {
String dbName = identifier.getCarbonTableIdentifier().getDatabaseName();
String tableName = identifier.getCarbonTableIdentifier().getTableName();
@@ -1125,7 +1125,7 @@ public class SegmentStatusManager {
if (locked) {
CarbonLockUtil.fileUnlock(carbonTableStatusLock, LockUsage.TABLE_STATUS_LOCK);
}
- if (updationCompletionStatus) {
+ if (updateCompletionStatus) {
DeleteLoadFolders
.physicalFactAndMeasureMetadataDeletion(carbonTable, newAddedLoadHistoryList,
isForceDeletion, partitionSpecs);
@@ -1141,7 +1141,7 @@ public class SegmentStatusManager {
carbonTable.getAbsoluteTableIdentifier(), LockUsage.TABLE_STATUS_LOCK);
boolean locked = false;
try {
- // Update load metadate file after cleaning deleted nodes
+ // Update load metadata file after cleaning deleted nodes
locked = carbonTableStatusLock.lockWithRetries();
if (locked) {
LOG.info("Table status lock has been successfully acquired.");
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
index d547c3d..f9a3ee7 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
@@ -120,7 +120,7 @@ public class SegmentUpdateStatusManager {
*/
private void updateUpdateDetails(String updateVersion) {
if (updateVersion != null) {
- List<SegmentUpdateDetails> newupdateDetails = new ArrayList<>();
+ List<SegmentUpdateDetails> newUpdateDetails = new ArrayList<>();
for (SegmentUpdateDetails updateDetail : updateDetails) {
if (updateDetail.getDeltaFileStamps() != null) {
if (updateDetail.getDeltaFileStamps().contains(updateVersion)) {
@@ -128,14 +128,14 @@ public class SegmentUpdateStatusManager {
set.add(updateVersion);
updateDetail.setDeltaFileStamps(set);
updateDetail.setSegmentStatus(SegmentStatus.SUCCESS);
- newupdateDetails.add(updateDetail);
+ newUpdateDetails.add(updateDetail);
}
} else if (updateDetail.getDeleteDeltaStartTimestamp().equalsIgnoreCase(updateVersion)) {
updateDetail.setSegmentStatus(SegmentStatus.SUCCESS);
- newupdateDetails.add(updateDetail);
+ newUpdateDetails.add(updateDetail);
}
}
- updateDetails = newupdateDetails.toArray(new SegmentUpdateDetails[0]);
+ updateDetails = newUpdateDetails.toArray(new SegmentUpdateDetails[0]);
}
}
@@ -169,7 +169,7 @@ public class SegmentUpdateStatusManager {
/**
*
- * @param key will be like (segid/blockname) 0/0-0-5464654654654
+ * @param key will be like (segmentId/blockName) 0/0-0-5464654654654
* @return
*/
public SegmentUpdateDetails getDetailsForABlock(String key) {
@@ -203,7 +203,7 @@ public class SegmentUpdateStatusManager {
}
/**
- * This will return the lock object used to lock the table update status file before updation.
+ * This will return the lock object used to lock the table update status file before updating.
*
* @return
*/
@@ -269,8 +269,8 @@ public class SegmentUpdateStatusManager {
}
});
- for (CarbonFile cfile : files) {
- updatedDeltaFilesList.add(cfile.getCanonicalPath());
+ for (CarbonFile file : files) {
+ updatedDeltaFilesList.add(file.getCanonicalPath());
}
return updatedDeltaFilesList;
@@ -293,11 +293,11 @@ public class SegmentUpdateStatusManager {
private List<String> getDeltaFiles(String blockPath, String segment, String extension) {
Path path = new Path(blockPath);
String completeBlockName = path.getName();
- String blockNameWithoutExtn =
+ String blockNameWithoutExtension =
completeBlockName.substring(0, completeBlockName.lastIndexOf('.'));
//blockName without timestamp
final String blockNameFromTuple =
- blockNameWithoutExtn.substring(0, blockNameWithoutExtn.lastIndexOf("-"));
+ blockNameWithoutExtension.substring(0, blockNameWithoutExtension.lastIndexOf("-"));
return getDeltaFiles(path.getParent().toString(), blockNameFromTuple, extension, segment);
}
@@ -362,7 +362,7 @@ public class SegmentUpdateStatusManager {
new StringBuilder(blockDir).append(CarbonCommonConstants.FILE_SEPARATOR)
.append(block.getBlockName()).append("-")
.append(block.getDeleteDeltaStartTimestamp()).append(extension).toString());
- // If deltatimestamps list has data then it has multiple delta file so construct the file
+ // If delta timestamp list has data then it has multiple delta file so construct the file
// directly with list of deltas without listing
} else if (block.getDeltaFileStamps() != null && block.getDeltaFileStamps().size() > 0) {
for (String delta : block.getDeltaFileStamps()) {
@@ -372,7 +372,7 @@ public class SegmentUpdateStatusManager {
.toString());
}
} else {
- // It is for backward compatability.It lists the files.
+ // It is for backward compatibility. It lists the files.
return getFilePaths(blockDir, blockNameFromTuple, extension, deleteFileList,
deltaStartTimestamp, deltaEndTimeStamp);
}
@@ -397,8 +397,8 @@ public class SegmentUpdateStatusManager {
}
});
deltaList = new ArrayList<>(files.length);
- for (CarbonFile cfile : files) {
- deltaList.add(cfile.getCanonicalPath());
+ for (CarbonFile file : files) {
+ deltaList.add(file.getCanonicalPath());
}
segmentDeleteDeltaListMap.put(blockDir, deltaList);
}
@@ -469,7 +469,7 @@ public class SegmentUpdateStatusManager {
/**
* Returns all update delta files of specified Segment.
*
- * @param loadMetadataDetail metadatadetails of segment
+ * @param loadMetadataDetail metadata details of segment
* @param validUpdateFiles if true then only the valid range files will be returned.
* @return
*/
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/StageInputCollector.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/StageInputCollector.java
index f8e590c..0a3c35d 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/StageInputCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/StageInputCollector.java
@@ -38,7 +38,7 @@ import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import static org.apache.carbondata.core.util.path.CarbonTablePath.SUCCESS_FILE_SUBFIX;
+import static org.apache.carbondata.core.util.path.CarbonTablePath.SUCCESS_FILE_SUFFIX;
import com.google.gson.Gson;
import org.apache.commons.io.IOUtils;
@@ -89,10 +89,10 @@ public class StageInputCollector {
CarbonFile[] allFiles = dir.listFiles();
Map<String, CarbonFile> map = new HashMap<>();
Arrays.stream(allFiles)
- .filter(file -> file.getName().endsWith(SUCCESS_FILE_SUBFIX))
+ .filter(file -> file.getName().endsWith(SUCCESS_FILE_SUFFIX))
.forEach(file -> map.put(file.getName().substring(0, file.getName().indexOf(".")), file));
Arrays.stream(allFiles)
- .filter(file -> !file.getName().endsWith(SUCCESS_FILE_SUBFIX))
+ .filter(file -> !file.getName().endsWith(SUCCESS_FILE_SUFFIX))
.filter(file -> map.containsKey(file.getName()))
.forEach(carbonFile -> {
stageInputList.add(carbonFile);
diff --git a/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java b/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java
index eee2f10..f8ee3b8 100644
--- a/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java
+++ b/core/src/main/java/org/apache/carbondata/core/stream/StreamPruner.java
@@ -33,7 +33,7 @@ import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
import org.apache.carbondata.core.reader.CarbonIndexFileReader;
import org.apache.carbondata.core.scan.filter.FilterUtil;
-import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecutor;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.util.CarbonMetadataUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;
@@ -43,7 +43,7 @@ import org.apache.carbondata.format.BlockIndex;
public class StreamPruner {
private CarbonTable carbonTable;
- private FilterExecuter filterExecuter;
+ private FilterExecutor filterExecutor;
private int totalFileNums = 0;
@@ -66,13 +66,13 @@ public class StreamPruner {
carbonTable.getTableInfo().getFactTable().getListOfColumns();
// initial filter executor
SegmentProperties segmentProperties = new SegmentProperties(listOfColumns);
- filterExecuter = FilterUtil.getFilterExecuterTree(
+ filterExecutor = FilterUtil.getFilterExecutorTree(
filterExp, segmentProperties, null, minMaxCacheColumns, false);
}
}
public List<StreamFile> prune(List<Segment> segments) throws IOException {
- if (filterExecuter == null) {
+ if (filterExecutor == null) {
// if filter is null, list all stream files
return listAllStreamFiles(segments, false);
} else {
@@ -95,7 +95,7 @@ public class StreamPruner {
}
byte[][] maxValue = streamFile.getMinMaxIndex().getMaxValues();
byte[][] minValue = streamFile.getMinMaxIndex().getMinValues();
- BitSet bitSet = filterExecuter
+ BitSet bitSet = filterExecutor
.isScanRequired(maxValue, minValue, streamFile.getMinMaxIndex().getIsMinMaxSet());
if (!bitSet.isEmpty()) {
return true;
diff --git a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
index 5e505b3..5c0422c 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
@@ -320,10 +320,10 @@ public abstract class AbstractDataFileFooterConverter {
}
private List<ParentColumnTableRelation> fromThriftToWrapperParentTableColumnRelations(
- List<org.apache.carbondata.format.ParentColumnTableRelation> thirftParentColumnRelation) {
+ List<org.apache.carbondata.format.ParentColumnTableRelation> thriftParentColumnRelation) {
List<ParentColumnTableRelation> parentColumnTableRelationList = new ArrayList<>();
for (org.apache.carbondata.format.ParentColumnTableRelation carbonTableRelation :
- thirftParentColumnRelation) {
+ thriftParentColumnRelation) {
RelationIdentifier relationIdentifier =
new RelationIdentifier(carbonTableRelation.getRelationIdentifier().getDatabaseName(),
carbonTableRelation.getRelationIdentifier().getTableName(),
diff --git a/core/src/main/java/org/apache/carbondata/core/util/BlockletIndexUtil.java b/core/src/main/java/org/apache/carbondata/core/util/BlockletIndexUtil.java
index a4d3c12..32dfb4c 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/BlockletIndexUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/BlockletIndexUtil.java
@@ -146,7 +146,7 @@ public class BlockletIndexUtil {
/**
* This method will create file name to block Meta Info Mapping. This method will reduce the
- * number of namenode calls and using this method one namenode will fetch 1000 entries
+ * number of nameNode calls and using this method one nameNode will fetch 1000 entries
*
* @param segmentFilePath
* @return
@@ -431,7 +431,7 @@ public class BlockletIndexUtil {
} else {
// check if all the filter dimensions are cached
for (CarbonDimension filterDimension : filterDimensions) {
- // complex dimensions are not allwed to be specified in COLUMN_META_CACHE property, so
+ // complex dimensions are not allowed to be specified in COLUMN_META_CACHE property, so
// cannot validate for complex columns
if (filterDimension.isComplex()) {
continue;
diff --git a/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java b/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java
index 1cfeaaa..cbfe82e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java
@@ -87,19 +87,19 @@ public final class ByteUtil {
/**
* convert number in byte to more readable format
- * @param sizeInbyte
+ * @param sizeInByte
* @return
*/
- public static String convertByteToReadable(long sizeInbyte) {
+ public static String convertByteToReadable(long sizeInByte) {
String readableSize;
- if (sizeInbyte < CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR) {
- readableSize = sizeInbyte + " Byte";
- } else if (sizeInbyte < CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR *
+ if (sizeInByte < CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR) {
+ readableSize = sizeInByte + " Byte";
+ } else if (sizeInByte < CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR *
CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR) {
- readableSize = sizeInbyte / CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR + " KB";
+ readableSize = sizeInByte / CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR + " KB";
} else {
- readableSize = sizeInbyte / CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR /
+ readableSize = sizeInByte / CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR /
CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR + " MB";
}
return readableSize;
@@ -155,7 +155,7 @@ public final class ByteUtil {
long diff = lw ^ rw;
if (diff != 0) {
- if (!CarbonUnsafe.ISLITTLEENDIAN) {
+ if (!CarbonUnsafe.IS_LITTLE_ENDIAN) {
return lessThanUnsigned(lw, rw) ? -1 : 1;
}
@@ -230,7 +230,7 @@ public final class ByteUtil {
long diff = lw ^ rw;
if (diff != 0) {
- if (!CarbonUnsafe.ISLITTLEENDIAN) {
+ if (!CarbonUnsafe.IS_LITTLE_ENDIAN) {
return lessThanUnsigned(lw, rw) ? -1 : 1;
}
@@ -351,7 +351,7 @@ public final class ByteUtil {
}
/**
- * Stirng => byte[]
+ * String => byte[]
*
* @param s
* @return
@@ -443,7 +443,7 @@ public final class ByteUtil {
}
short n = 0;
if (CarbonUnsafe.getUnsafe() != null) {
- if (CarbonUnsafe.ISLITTLEENDIAN) {
+ if (CarbonUnsafe.IS_LITTLE_ENDIAN) {
n = Short.reverseBytes(
CarbonUnsafe.getUnsafe().getShort(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET));
} else {
@@ -513,7 +513,7 @@ public final class ByteUtil {
}
int n = 0;
if (CarbonUnsafe.getUnsafe() != null) {
- if (CarbonUnsafe.ISLITTLEENDIAN) {
+ if (CarbonUnsafe.IS_LITTLE_ENDIAN) {
n = Integer.reverseBytes(
CarbonUnsafe.getUnsafe().getInt(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET));
} else {
@@ -582,7 +582,7 @@ public final class ByteUtil {
}
long l = 0;
if (CarbonUnsafe.getUnsafe() != null) {
- if (CarbonUnsafe.ISLITTLEENDIAN) {
... 2075 lines suppressed ...