You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ja...@apache.org on 2018/06/30 07:17:09 UTC
carbondata git commit: [CARBONDATA-2545] Fix some spelling errors in
CarbonData
Repository: carbondata
Updated Branches:
refs/heads/master ca201604a -> e30a84cc5
[CARBONDATA-2545] Fix some spelling errors in CarbonData
This closes #2419
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/e30a84cc
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/e30a84cc
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/e30a84cc
Branch: refs/heads/master
Commit: e30a84cc53182f6413dc4030e77eab9d12dce9eb
Parents: ca20160
Author: xubo245 <xu...@huawei.com>
Authored: Wed Jun 27 14:36:26 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Sat Jun 30 15:16:55 2018 +0800
----------------------------------------------------------------------
.../apache/carbondata/core/datamap/DataMapStoreManager.java | 6 +++---
.../columnar/impl/MultiDimKeyVarLengthEquiSplitGenerator.java | 4 ++--
.../carbondata/core/metadata/schema/table/DataMapSchema.java | 4 ++--
.../org/apache/carbondata/core/scan/filter/FilterUtil.java | 2 +-
.../carbondata/datamap/lucene/LuceneFineGrainDataMap.java | 2 +-
.../src/test/scala/org/apache/spark/util/SparkUtil4Test.scala | 6 +++---
.../org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala | 6 +++---
.../scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala | 2 +-
8 files changed, 16 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e30a84cc/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index 96d2b1c..0d3e40d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -179,7 +179,7 @@ public final class DataMapStoreManager {
*/
public synchronized void registerDataMapCatalog(DataMapProvider dataMapProvider,
DataMapSchema dataMapSchema) throws IOException {
- intializeDataMapCatalogs(dataMapProvider);
+ initializeDataMapCatalogs(dataMapProvider);
String name = dataMapSchema.getProviderName();
DataMapCatalog dataMapCatalog = dataMapCatalogs.get(name);
if (dataMapCatalog == null) {
@@ -215,7 +215,7 @@ public final class DataMapStoreManager {
*/
public synchronized DataMapCatalog getDataMapCatalog(DataMapProvider dataMapProvider,
String providerName) throws IOException {
- intializeDataMapCatalogs(dataMapProvider);
+ initializeDataMapCatalogs(dataMapProvider);
return dataMapCatalogs.get(providerName);
}
@@ -223,7 +223,7 @@ public final class DataMapStoreManager {
* Initialize by reading all datamaps from store and re register it
* @param dataMapProvider
*/
- private void intializeDataMapCatalogs(DataMapProvider dataMapProvider) throws IOException {
+ private void initializeDataMapCatalogs(DataMapProvider dataMapProvider) throws IOException {
if (dataMapCatalogs == null) {
dataMapCatalogs = new ConcurrentHashMap<>();
List<DataMapSchema> dataMapSchemas = getAllDataMapSchemas();
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e30a84cc/core/src/main/java/org/apache/carbondata/core/keygenerator/columnar/impl/MultiDimKeyVarLengthEquiSplitGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/columnar/impl/MultiDimKeyVarLengthEquiSplitGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/columnar/impl/MultiDimKeyVarLengthEquiSplitGenerator.java
index 586f881..53f68d6 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/columnar/impl/MultiDimKeyVarLengthEquiSplitGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/columnar/impl/MultiDimKeyVarLengthEquiSplitGenerator.java
@@ -46,10 +46,10 @@ public class MultiDimKeyVarLengthEquiSplitGenerator extends MultiDimKeyVarLength
public MultiDimKeyVarLengthEquiSplitGenerator(int[] lens, byte dimensionsToSplit) {
super(lens);
this.dimensionsToSplit = dimensionsToSplit;
- intialize();
+ initialize();
}
- private void intialize() {
+ private void initialize() {
byte s = 0;
List<Set<Integer>> splitList =
new ArrayList<Set<Integer>>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e30a84cc/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchema.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchema.java
index 7f6e86f..e373fae 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchema.java
@@ -198,8 +198,8 @@ public class DataMapSchema implements Serializable, Writable {
public void readFields(DataInput in) throws IOException {
this.dataMapName = in.readUTF();
this.providerName = in.readUTF();
- boolean isRelationIdnentifierExists = in.readBoolean();
- if (isRelationIdnentifierExists) {
+ boolean isRelationIdentifierExists = in.readBoolean();
+ if (isRelationIdentifierExists) {
this.relationIdentifier = new RelationIdentifier(null, null, null);
this.relationIdentifier.readFields(in);
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e30a84cc/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index 9741915..b5fd0b7 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -940,7 +940,7 @@ public final class FilterUtil {
/**
* Algorithm for getting the start key for a filter
* step 1: Iterate through each dimension and verify whether its not an exclude filter.
- * step 2: Intialize start key with the first filter member value present in each filter model
+ * step 2: Initialize start key with the first filter member value present in each filter model
* for the respective dimensions.
* step 3: since its a no dictionary start key there will only actual value so compare
* the first filter model value with respect to the dimension data type.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e30a84cc/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
index b26ab53..63f8d7a 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
@@ -143,7 +143,7 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
}
LOGGER.info(
- "Time taken to intialize lucene searcher: " + (System.currentTimeMillis() - startTime));
+ "Time taken to initialize lucene searcher: " + (System.currentTimeMillis() - startTime));
}
private IndexSearcher createIndexSearcher(Path indexPath) throws IOException {
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e30a84cc/integration/spark-common-test/src/test/scala/org/apache/spark/util/SparkUtil4Test.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/spark/util/SparkUtil4Test.scala b/integration/spark-common-test/src/test/scala/org/apache/spark/util/SparkUtil4Test.scala
index 163a88c..746e24e 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/spark/util/SparkUtil4Test.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/spark/util/SparkUtil4Test.scala
@@ -26,7 +26,7 @@ import org.apache.spark.{SparkConf, TaskContextImpl}
*/
object SparkUtil4Test {
- private var intializedMock = false
+ private var initializedMock = false
def getConfiguredLocalDirs(conf: SparkConf): Array[String] = {
Utils.getConfiguredLocalDirs(conf)
@@ -41,13 +41,13 @@ object SparkUtil4Test {
* @param sqlContext
*/
def createTaskMockUp(sqlContext: SQLContext): Unit = {
- if (!intializedMock) {
+ if (!initializedMock) {
if (sqlContext.sparkContext.version.startsWith("2.1")) {
createTaskMockUp2_1
} else if (sqlContext.sparkContext.version.startsWith("2.2")) {
createTaskMockUp2_2()
}
- intializedMock = true
+ initializedMock = true
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e30a84cc/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
index 5ed39fa..80b2d12 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
@@ -229,7 +229,7 @@ class NewCarbonDataLoadRDD[K, V](
theSplit.index,
null,
loadMetadataDetails)
- // Intialize to set carbon properties
+ // Initialize to set carbon properties
loader.initialize()
val executor = new DataLoadExecutor()
// in case of success, failure or cancelation clear memory and stop execution
@@ -383,7 +383,7 @@ class NewDataFrameLoaderRDD[K, V](
theSplit.index,
null,
loadMetadataDetails)
- // Intialize to set carbon properties
+ // Initialize to set carbon properties
loader.initialize()
val executor = new DataLoadExecutor
// in case of success, failure or cancelation clear memory and stop execution
@@ -575,7 +575,7 @@ class PartitionTableDataLoaderRDD[K, V](
theSplit.index,
null,
loadMetadataDetails)
- // Intialize to set carbon properties
+ // Initialize to set carbon properties
loader.initialize()
val executor = new DataLoadExecutor
// in case of success, failure or cancelation clear memory and stop execution
http://git-wip-us.apache.org/repos/asf/carbondata/blob/e30a84cc/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala
index b6dd6b8..2e7c307 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala
@@ -50,7 +50,7 @@ object UpdateDataLoad {
index,
null,
loadMetadataDetails)
- // Intialize to set carbon properties
+ // Initialize to set carbon properties
loader.initialize()
loadMetadataDetails.setSegmentStatus(SegmentStatus.SUCCESS)