Posted to commits@carbondata.apache.org by xu...@apache.org on 2018/12/29 16:20:46 UTC

carbondata git commit: [CARBONDATA-3126] Correct some spelling errors in CarbonData

Repository: carbondata
Updated Branches:
  refs/heads/master e193df0a1 -> 3e4638b33


[CARBONDATA-3126] Correct some spelling errors in CarbonData

This PR fixes some spelling errors in CarbonData.

This closes #3034


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/3e4638b3
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/3e4638b3
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/3e4638b3

Branch: refs/heads/master
Commit: 3e4638b3380670ace454bb3a59579ddd72349fab
Parents: e193df0
Author: tisonkong <36...@qq.com>
Authored: Sat Dec 29 10:02:27 2018 +0800
Committer: xubo245 <xu...@huawei.com>
Committed: Sun Dec 30 00:20:10 2018 +0800

----------------------------------------------------------------------
 .../org/apache/carbondata/common/Strings.java   |  6 ++---
 .../apache/carbondata/common/StringsSuite.java  |  2 +-
 .../core/constants/CarbonCommonConstants.java   |  2 +-
 .../core/datastore/filesystem/CarbonFile.java   |  2 +-
 .../datastore/filesystem/LocalCarbonFile.java   |  2 +-
 .../core/datastore/impl/FileFactory.java        |  2 +-
 .../core/metadata/schema/table/CarbonTable.java |  2 +-
 .../impl/DictionaryBasedResultCollector.java    |  8 +++---
 .../DictionaryBasedVectorResultCollector.java   | 11 ++++----
 .../core/statusmanager/LoadMetadataDetails.java |  2 +-
 .../statusmanager/SegmentStatusManager.java     |  2 +-
 .../carbondata/core/util/CarbonProperties.java  |  2 +-
 .../apache/carbondata/core/util/CarbonUtil.java |  2 +-
 .../core/CarbonPropertiesValidationTest.java    |  2 +-
 docs/hive-guide.md                              |  4 +--
 .../examples/SparkSessionExample.scala          |  6 ++---
 .../carbondata/examples/util/ExampleUtils.scala |  4 +--
 .../hadoop/api/CarbonFileInputFormat.java       |  2 +-
 .../hadoop/api/CarbonTableInputFormat.java      |  2 +-
 .../sdv/generated/ColumndictTestCase.scala      |  4 +--
 .../sdv/generated/DataLoadingTestCase.scala     |  2 +-
 .../complexType/TestComplexTypeQuery.scala      | 20 +++++++-------
 .../testsuite/datamap/TestDataMapCommand.scala  |  2 +-
 .../StandardPartitionTableQueryTestCase.scala   | 10 +++----
 .../org/apache/carbondata/api/CarbonStore.scala |  8 +++---
 .../spark/sql/test/TestQueryExecutor.scala      |  2 +-
 .../apache/spark/sql/test/util/QueryTest.scala  |  2 +-
 .../sql/carbondata/datasource/TestUtil.scala    |  2 +-
 .../command/mutation/DeleteExecution.scala      |  2 +-
 .../preaaggregate/PreAggregateListeners.scala   |  2 +-
 .../strategy/CarbonLateDecodeStrategy.scala     | 12 ++++-----
 .../sql/test/Spark2TestQueryExecutor.scala      |  4 +--
 .../spark/util/AllDictionaryTestCase.scala      |  4 +--
 .../util/ExternalColumnDictionaryTestCase.scala |  4 +--
 .../processing/datatypes/StructDataType.java    |  2 +-
 .../loading/model/CarbonLoadModelBuilder.java   | 28 ++++++++++----------
 36 files changed, 88 insertions(+), 87 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/common/src/main/java/org/apache/carbondata/common/Strings.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/Strings.java b/common/src/main/java/org/apache/carbondata/common/Strings.java
index 35c24ba..4bb9dc8 100644
--- a/common/src/main/java/org/apache/carbondata/common/Strings.java
+++ b/common/src/main/java/org/apache/carbondata/common/Strings.java
@@ -28,14 +28,14 @@ public class Strings {
    * Provide same function as mkString in Scala.
    * This is added to avoid JDK 8 dependency.
    */
-  public static String mkString(String[] strings, String delimeter) {
+  public static String mkString(String[] strings, String delimiter) {
     Objects.requireNonNull(strings);
-    Objects.requireNonNull(delimeter);
+    Objects.requireNonNull(delimiter);
     StringBuilder builder = new StringBuilder();
     for (int i = 0; i < strings.length; i++) {
       builder.append(strings[i]);
       if (i != strings.length - 1) {
-        builder.append(delimeter);
+        builder.append(delimiter);
       }
     }
     return builder.toString();

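For context, a minimal usage sketch of the corrected utility (hypothetical demo class; Strings.mkString is the method shown in the hunk above):

import org.apache.carbondata.common.Strings;

public class MkStringDemo {
  public static void main(String[] args) {
    // Joins the elements with the delimiter, with no trailing delimiter.
    String joined = Strings.mkString(new String[]{"col1", "col2", "col3"}, ",");
    System.out.println(joined);   // prints: col1,col2,col3
    // A null delimiter throws NullPointerException, as exercised by
    // testMkStringNullDelimiter in StringsSuite below.
  }
}
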
http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/common/src/test/java/org/apache/carbondata/common/StringsSuite.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/carbondata/common/StringsSuite.java b/common/src/test/java/org/apache/carbondata/common/StringsSuite.java
index 65da32b..ccabaf7 100644
--- a/common/src/test/java/org/apache/carbondata/common/StringsSuite.java
+++ b/common/src/test/java/org/apache/carbondata/common/StringsSuite.java
@@ -28,7 +28,7 @@ public class StringsSuite {
   }
 
   @Test(expected = NullPointerException.class)
-  public void testMkStringNullDelimeter() {
+  public void testMkStringNullDelimiter() {
     Strings.mkString(new String[]{"abc"}, null);
   }
   

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 11b2f38..8d0a4d9 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1122,7 +1122,7 @@ public final class CarbonCommonConstants {
   public static final int CARBON_SORT_STORAGE_INMEMORY_IN_MB_DEFAULT = 512;
 
   /*
-   * whether to enable prefetch for rowbatch to enhance row reconstruction during compaction
+   * whether to enable prefetch for rowBatch to enhance row reconstruction during compaction
    */
   @CarbonProperty
   public static final String CARBON_COMPACTION_PREFETCH_ENABLE =

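As a rough sketch of how such a flag is typically consulted (the actual call site in the compaction flow is not part of this diff, so treat the lookup below as an assumption), the value can be read through CarbonProperties by its key:

import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;

public class PrefetchFlagDemo {
  public static void main(String[] args) {
    // Look up the prefetch flag for rowBatch reconstruction during compaction.
    String value = CarbonProperties.getInstance()
        .getProperty(CarbonCommonConstants.CARBON_COMPACTION_PREFETCH_ENABLE);
    // value may be null if the property was never set.
    boolean prefetchEnabled = Boolean.parseBoolean(value);
    System.out.println("compaction prefetch enabled: " + prefetchEnabled);
  }
}
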
http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/CarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/CarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/CarbonFile.java
index ce50259..be08338 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/CarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/CarbonFile.java
@@ -36,7 +36,7 @@ public interface CarbonFile {
 
   CarbonFile[] listFiles();
 
-  List<CarbonFile> listFiles(Boolean recurssive) throws IOException;
+  List<CarbonFile> listFiles(Boolean recursive) throws IOException;
 
   List<CarbonFile> listFiles(boolean recursive, CarbonFileFilter fileFilter) throws IOException;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
index 98d61f8..2cace55 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFile.java
@@ -168,7 +168,7 @@ public class LocalCarbonFile implements CarbonFile {
   }
 
   @Override
-  public List<CarbonFile> listFiles(Boolean recurssive) {
+  public List<CarbonFile> listFiles(Boolean recursive) {
     if (!file.isDirectory()) {
       return new ArrayList<CarbonFile>();
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
index 25a59f8..e951f58 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
@@ -190,7 +190,7 @@ public final class FileFactory {
    * @param fileType
    * @param bufferSize
    * @param compressorName name of compressor to write this file
-   * @return data out put stram
+   * @return data out put stream
    * @throws IOException
    */
   public static DataOutputStream getDataOutputStream(String path, FileType fileType, int bufferSize,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index 82bf148..daaed9d 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -391,7 +391,7 @@ public class CarbonTable implements Serializable {
   }
 
   /**
-   * This method will add implict dimension into carbontable
+   * This method will add implicit dimension into carbontable
    *
    * @param dimensionOrdinal
    * @param dimensions

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java
index 69b142f..cdabbc0 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedResultCollector.java
@@ -62,7 +62,7 @@ public class DictionaryBasedResultCollector extends AbstractScannedResultCollect
 
   boolean[] directDictionaryEncodingArray;
 
-  private boolean[] implictColumnArray;
+  private boolean[] implicitColumnArray;
 
   private boolean[] complexDataTypeArray;
 
@@ -213,7 +213,7 @@ public class DictionaryBasedResultCollector extends AbstractScannedResultCollect
         // complex dictionary columns comes.
         ByteBuffer buffer;
         if (!dictionaryEncodingArray[i]) {
-          if (implictColumnArray[i]) {
+          if (implicitColumnArray[i]) {
             throw new RuntimeException("Not Supported Column Type");
           } else if (complexDataTypeArray[i]) {
             buffer = ByteBuffer.wrap(complexTypeKeyArray[complexTypeComplexColumnIndex++]);
@@ -242,7 +242,7 @@ public class DictionaryBasedResultCollector extends AbstractScannedResultCollect
       byte[][] noDictionaryKeys, byte[][] complexTypeKeyArray,
       Map<Integer, GenericQueryType> complexDimensionInfoMap, Object[] row, int i) {
     if (!dictionaryEncodingArray[i]) {
-      if (implictColumnArray[i]) {
+      if (implicitColumnArray[i]) {
         if (CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID
             .equals(queryDimensions[i].getColumnName())) {
           row[order[i]] = DataTypeUtil.getDataBasedOnDataType(
@@ -347,7 +347,7 @@ public class DictionaryBasedResultCollector extends AbstractScannedResultCollect
 
     dictionaryEncodingArray = CarbonUtil.getDictionaryEncodingArray(queryDimensions);
     directDictionaryEncodingArray = CarbonUtil.getDirectDictionaryEncodingArray(queryDimensions);
-    implictColumnArray = CarbonUtil.getImplicitColumnArray(queryDimensions);
+    implicitColumnArray = CarbonUtil.getImplicitColumnArray(queryDimensions);
     complexDataTypeArray = CarbonUtil.getComplexDataTypeArray(queryDimensions);
 
     parentToChildColumnsMap.clear();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
index 430a555..727e969 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java
@@ -60,7 +60,7 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
 
   ColumnVectorInfo[] allColumnInfo;
 
-  private ColumnVectorInfo[] implictColumnInfo;
+  private ColumnVectorInfo[] implicitColumnInfo;
 
   private boolean isDirectVectorFill;
 
@@ -85,14 +85,14 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
     List<ColumnVectorInfo> dictInfoList = new ArrayList<>();
     List<ColumnVectorInfo> noDictInfoList = new ArrayList<>();
     List<ColumnVectorInfo> complexList = new ArrayList<>();
-    List<ColumnVectorInfo> implictColumnList = new ArrayList<>();
+    List<ColumnVectorInfo> implicitColumnList = new ArrayList<>();
     for (int i = 0; i < queryDimensions.length; i++) {
       if (!dimensionInfo.getDimensionExists()[i]) {
         continue;
       }
       if (queryDimensions[i].getDimension().hasEncoding(Encoding.IMPLICIT)) {
         ColumnVectorInfo columnVectorInfo = new ColumnVectorInfo();
-        implictColumnList.add(columnVectorInfo);
+        implicitColumnList.add(columnVectorInfo);
         columnVectorInfo.dimension = queryDimensions[i];
         columnVectorInfo.ordinal = queryDimensions[i].getDimension().getOrdinal();
         allColumnInfo[queryDimensions[i].getOrdinal()] = columnVectorInfo;
@@ -145,7 +145,8 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
     dictionaryInfo = dictInfoList.toArray(new ColumnVectorInfo[dictInfoList.size()]);
     noDictionaryInfo = noDictInfoList.toArray(new ColumnVectorInfo[noDictInfoList.size()]);
     complexInfo = complexList.toArray(new ColumnVectorInfo[complexList.size()]);
-    implictColumnInfo = implictColumnList.toArray(new ColumnVectorInfo[implictColumnList.size()]);
+    implicitColumnInfo = implicitColumnList.toArray(
+            new ColumnVectorInfo[implicitColumnList.size()]);
     Arrays.sort(dictionaryInfo);
     Arrays.sort(complexInfo);
   }
@@ -194,7 +195,7 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC
     scannedResult.fillColumnarNoDictionaryBatch(noDictionaryInfo);
     scannedResult.fillColumnarMeasureBatch(measureColumnInfo, measureInfo.getMeasureOrdinals());
     scannedResult.fillColumnarComplexBatch(complexInfo);
-    scannedResult.fillColumnarImplicitBatch(implictColumnInfo);
+    scannedResult.fillColumnarImplicitBatch(implicitColumnInfo);
     // it means fetched all data out of page so increment the page counter
     if (availableRows == requiredRows) {
       scannedResult.incrementPageCounter();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
index b19e774..7a16379 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
@@ -99,7 +99,7 @@ public class LoadMetadataDetails implements Serializable {
   private static final Logger LOGGER =
       LogServiceFactory.getLogService(LoadMetadataDetails.class.getName());
 
-  // dont remove static as the write will fail.
+  // don't remove static as the write will fail.
   private static final SimpleDateFormat parser =
       new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP_MILLIS);
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
index 32b1e78..4a5063f 100755
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
@@ -356,7 +356,7 @@ public class SegmentStatusManager {
         if (listOfLoadFolderDetailsArray.length != 0) {
           updateDeletionStatus(identifier, loadIds, listOfLoadFolderDetailsArray, invalidLoadIds);
           if (invalidLoadIds.isEmpty()) {
-            // All or None , if anything fails then dont write
+            // All or None , if anything fails then don't write
             if (carbonTableStatusLock.lockWithRetries()) {
               LOG.info("Table status lock has been successfully acquired");
               // To handle concurrency scenarios, always take latest metadata before writing

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 93d622d..1caecad 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -84,7 +84,7 @@ public final class CarbonProperties {
   private static final CarbonProperties CARBONPROPERTIESINSTANCE = new CarbonProperties();
 
   /**
-   * porpeties .
+   * Properties
    */
   private Properties carbonProperties;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index fc4704e..3fb54f0 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -560,7 +560,7 @@ public final class CarbonUtil {
   }
 
   /**
-   * From beeline if a delimeter is passed as \001, in code we get it as
+   * From beeline if a delimiter is passed as \001, in code we get it as
    * escaped string as \\001. So this method will unescape the slash again and
    * convert it back t0 \001
    *

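A small sketch of the unescaping described in this javadoc (behavior assumed from the comment and from CarbonLoadModelBuilder further down, which routes the CSV delimiter through CarbonUtil.unescapeChar before use):

import org.apache.carbondata.core.util.CarbonUtil;

public class UnescapeDemo {
  public static void main(String[] args) {
    // Beeline passes \001 to the loader as the escaped string "\\001";
    // unescapeChar is expected to convert it back to the control character.
    String escaped = "\\001";
    String delimiter = CarbonUtil.unescapeChar(escaped);
    System.out.println((int) delimiter.charAt(0));  // expected: 1
  }
}
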
http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java b/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
index b2b03cd..2500f71 100644
--- a/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
@@ -37,7 +37,7 @@ public class CarbonPropertiesValidationTest extends TestCase {
     carbonProperties = CarbonProperties.getInstance();
   }
 
-  @Test public void testvalidateLockType()
+  @Test public void testValidateLockType()
       throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
     Method validateMethodType = carbonProperties.getClass().getDeclaredMethod("validateLockType");
     validateMethodType.setAccessible(true);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/docs/hive-guide.md
----------------------------------------------------------------------
diff --git a/docs/hive-guide.md b/docs/hive-guide.md
index e675057..0e52f1b 100644
--- a/docs/hive-guide.md
+++ b/docs/hive-guide.md
@@ -58,9 +58,9 @@ import org.apache.spark.sql.CarbonSession._
 val rootPath = "hdfs:///user/hadoop/carbon"
 val storeLocation = s"$rootPath/store"
 val warehouse = s"$rootPath/warehouse"
-val metastoredb = s"$rootPath/metastore_db"
+val metaStoreDB = s"$rootPath/metastore_db"
 
-val carbon = SparkSession.builder().enableHiveSupport().config("spark.sql.warehouse.dir", warehouse).config(org.apache.carbondata.core.constants.CarbonCommonConstants.STORE_LOCATION, storeLocation).getOrCreateCarbonSession(storeLocation, metastoredb)
+val carbon = SparkSession.builder().enableHiveSupport().config("spark.sql.warehouse.dir", warehouse).config(org.apache.carbondata.core.constants.CarbonCommonConstants.STORE_LOCATION, storeLocation).getOrCreateCarbonSession(storeLocation, metaStoreDB)
 
 carbon.sql("create table hive_carbon(id int, name string, scale decimal, country string, salary double) STORED BY 'carbondata'")
 carbon.sql("LOAD DATA INPATH '<hdfs store path>/sample.csv' INTO TABLE hive_carbon")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
index 1164658..66d4b71 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
@@ -36,14 +36,14 @@ object SparkSessionExample {
                             + "../../../..").getCanonicalPath
     val storeLocation = s"$rootPath/examples/spark2/target/store"
     val warehouse = s"$rootPath/examples/spark2/target/warehouse"
-    val metastoredb = s"$rootPath/examples/spark2/target/metastore_db"
+    val metaStoreDB = s"$rootPath/examples/spark2/target/metastore_db"
 
     // clean data folder
     if (true) {
       val clean = (path: String) => FileUtils.deleteDirectory(new File(path))
       clean(storeLocation)
       clean(warehouse)
-      clean(metastoredb)
+      clean(metaStoreDB)
     }
 
     val sparksession = SparkSession
@@ -53,7 +53,7 @@ object SparkSessionExample {
       .enableHiveSupport()
       .config("spark.sql.warehouse.dir", warehouse)
       .config("javax.jdo.option.ConnectionURL",
-        s"jdbc:derby:;databaseName=$metastoredb;create=true")
+        s"jdbc:derby:;databaseName=$metaStoreDB;create=true")
       .getOrCreate()
 
     CarbonProperties.getInstance()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
index 3064d69..bb9f4d0 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
@@ -35,7 +35,7 @@ object ExampleUtils {
                             + "../../../..").getCanonicalPath
     val storeLocation = s"$rootPath/examples/spark2/target/store"
     val warehouse = s"$rootPath/examples/spark2/target/warehouse"
-    val metastoredb = s"$rootPath/examples/spark2/target"
+    val metaStoreDB = s"$rootPath/examples/spark2/target"
 
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
@@ -57,7 +57,7 @@ object ExampleUtils {
       .config("spark.sql.warehouse.dir", warehouse)
       .config("spark.driver.host", "localhost")
       .config("spark.sql.crossJoin.enabled", "true")
-      .getOrCreateCarbonSession(storeLocation, metastoredb)
+      .getOrCreateCarbonSession(storeLocation, metaStoreDB)
 
     spark.sparkContext.setLogLevel("ERROR")
     spark

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java
index dbfa4ec..7c08dd9 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java
@@ -157,7 +157,7 @@ public class CarbonFileInputFormat<T> extends CarbonInputFormat<T> implements Se
       List<InputSplit> splits = new ArrayList<>();
       boolean useBlockDataMap = job.getConfiguration().getBoolean("filter_blocks", true);
       // useBlockDataMap would be false in case of SDK when user has not provided any filter, In
-      // this case we dont want to load block/blocklet datamap. It would be true in all other
+      // this case we don't want to load block/blocklet datamap. It would be true in all other
       // scenarios
       if (useBlockDataMap) {
         // do block filtering and get split

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java
index ea1bcd3..c56b1db 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java
@@ -634,7 +634,7 @@ public class CarbonTableInputFormat<T> extends CarbonInputFormat<T> {
       String segmentId = Segment.toSegment(blocklet.getSegmentId()).getSegmentNo();
       String key = CarbonUpdateUtil.getSegmentBlockNameKey(segmentId, blockName);
 
-      // if block is invalid then dont add the count
+      // if block is invalid then don't add the count
       SegmentUpdateDetails details = updateStatusManager.getDetailsForABlock(key);
 
       if (null == details || !CarbonUpdateUtil.isBlockInvalid(details.getSegmentStatus())) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala
index c8e8f1b..54740c6 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ColumndictTestCase.scala
@@ -28,7 +28,7 @@ import org.scalatest.BeforeAndAfterAll
 class ColumndictTestCase extends QueryTest with BeforeAndAfterAll {
          
 
-  //Load history data from CSV with/without header and specify/dont specify headers in command using external ALL_dictionary_PATH
+  //Load history data from CSV with/without header and specify/don't specify headers in command using external ALL_dictionary_PATH
   test("Columndict-TC001", Include) {
      sql(s"""drop table if exists t3""").collect
    sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect
@@ -37,7 +37,7 @@ class ColumndictTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Load history data from CSV with/without header and specify/dont specify headers in command using external columndict
+  //Load history data from CSV with/without header and specify/don't specify headers in command using external columndict
   test("Columndict-TC002", Include) {
      sql(s"""CREATE TABLE IF NOT EXISTS t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect
     sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/columndict/data.csv' into table t3 options('COLUMNDICT'='country:$resourcesPath/Data/columndict/country.csv', 'SINGLE_PASS'='true')""").collect

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
index 24a5aa4..172cb64 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
@@ -244,7 +244,7 @@ class DataLoadingTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Show loads-->Delimeter_check
+  //Show loads-->Delimiter_check
   test("BadRecord_Dataload_021", Include) {
     sql(
       s"""CREATE TABLE bad_records_test5 (String_col string,integer_col int,decimal_column

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
index 6728cdf..e5f79e7 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
@@ -47,8 +47,8 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists structusingstructHive")
     sql("drop table if exists structusingarraycarbon")
     sql("drop table if exists structusingarrayhive")
-    sql("drop table if exists complexcarbonwithspecialchardelimeter")
-    sql("drop table if exists complexhivewithspecialchardelimeter")
+    sql("drop table if exists complexcarbonwithspecialchardelimiter")
+    sql("drop table if exists complexhivewithspecialchardelimiter")
     sql(
       "create table complexcarbontable(deviceInformationId int, channelsId string, ROMSize " +
       "string, ROMName String, purchasedate string, mobile struct<imei:string, imsi:string>, MAC " +
@@ -128,7 +128,7 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
   test(
     "Test ^ * special character data loading for complex types") {
     sql(
-      "create table complexcarbonwithspecialchardelimeter(deviceInformationId int, channelsId " +
+      "create table complexcarbonwithspecialchardelimiter(deviceInformationId int, channelsId " +
       "string, ROMSize string, ROMName String, purchasedate string, mobile struct<imei:string, " +
       "imsi:string>, MAC array<string>, locationinfo array<struct<ActiveAreaId:int, " +
       "ActiveCountry:string, ActiveProvince:string, Activecity:string, ActiveDistrict:string, " +
@@ -136,12 +136,12 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
       "activeDeactivedate:array<string>>, gamePointId double,contractNumber double)  STORED BY " +
       "'org.apache.carbondata.format'");
     sql("LOAD DATA local inpath '" + resourcesPath +
-        "/complextypespecialchardelimiter.csv' INTO table complexcarbonwithspecialchardelimeter  " +
+        "/complextypespecialchardelimiter.csv' INTO table complexcarbonwithspecialchardelimiter  " +
         "OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"', 'FILEHEADER'='deviceInformationId,channelsId," +
         "ROMSize,ROMName,purchasedate,mobile,MAC,locationinfo,proddate,gamePointId," +
         "contractNumber', 'COMPLEX_DELIMITER_LEVEL_1'='^', 'COMPLEX_DELIMITER_LEVEL_2'='*')");
     sql(
-      "create table complexhivewithspecialchardelimeter(deviceInformationId int, channelsId " +
+      "create table complexhivewithspecialchardelimiter(deviceInformationId int, channelsId " +
       "string, ROMSize string, ROMName String, purchasedate string, mobile struct<imei:string, " +
       "imsi:string>, MAC array<string>, locationinfo array<struct<ActiveAreaId:int, " +
       "ActiveCountry:string, ActiveProvince:string, Activecity:string, ActiveDistrict:string, " +
@@ -150,11 +150,11 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
       "delimited fields terminated by ',' collection items terminated by '^' map keys terminated " +
       "by '*'")
     sql("LOAD DATA local inpath '" + resourcesPath +
-        "/complextypespecialchardelimiter.csv' INTO table complexhivewithspecialchardelimeter");
-    checkAnswer(sql("select * from complexcarbonwithspecialchardelimeter"),
-      sql("select * from complexhivewithspecialchardelimeter"))
-    sql("drop table if exists complexcarbonwithspecialchardelimeter")
-    sql("drop table if exists complexhivewithspecialchardelimeter")
+        "/complextypespecialchardelimiter.csv' INTO table complexhivewithspecialchardelimiter");
+    checkAnswer(sql("select * from complexcarbonwithspecialchardelimiter"),
+      sql("select * from complexhivewithspecialchardelimiter"))
+    sql("drop table if exists complexcarbonwithspecialchardelimiter")
+    sql("drop table if exists complexhivewithspecialchardelimiter")
   }
 
   test("complex filter set1") {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala
index edd3e9c..ffe1977 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala
@@ -302,7 +302,7 @@ class TestDataMapCommand extends QueryTest with BeforeAndAfterAll {
   test("create pre-agg table with path") {
     sql("drop table if exists main_preagg")
     sql("drop table if exists main ")
-    val warehouse = s"$metastoredb/warehouse"
+    val warehouse = s"$metaStoreDB/warehouse"
     val path = warehouse + "/" + System.nanoTime + "_preAggTestPath"
     sql(
       s"""

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableQueryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableQueryTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableQueryTestCase.scala
index c7957c1..c19c0b9 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableQueryTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableQueryTestCase.scala
@@ -304,7 +304,7 @@ test("Creation of partition table should fail if the colname in table schema and
       """create table partitionTable (id int,name String) partitioned by(email string) stored by 'carbondata'
       """.stripMargin)
     sql("insert into partitionTable select 1,'huawei','abc'")
-    val location = metastoredb +"/" +"def"
+    val location = metaStoreDB +"/" +"def"
     checkAnswer(sql("show partitions partitionTable"), Seq(Row("email=abc")))
     sql(s"""alter table partitionTable add partition (email='def') location '$location'""")
     sql("insert into partitionTable select 1,'huawei','def'")
@@ -323,7 +323,7 @@ test("Creation of partition table should fail if the colname in table schema and
         | PARTITIONED BY (empname String)
         | STORED BY 'org.apache.carbondata.format'
       """.stripMargin)
-    val location = metastoredb +"/" +"ravi"
+    val location = metaStoreDB +"/" +"ravi"
     sql(s"""alter table staticpartitionlocload add partition (empname='ravi') location '$location'""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE staticpartitionlocload partition(empname='ravi') OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     val frame = sql("select count(empno) from staticpartitionlocload")
@@ -348,7 +348,7 @@ test("Creation of partition table should fail if the colname in table schema and
         | PARTITIONED BY (empname String)
         | STORED BY 'org.apache.carbondata.format'
       """.stripMargin)
-    val location = metastoredb +"/" +"ravi1"
+    val location = metaStoreDB +"/" +"ravi1"
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE staticpartitionsetloc partition(empname='ravi') OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     intercept[Exception] {
       sql(s"""alter table staticpartitionsetloc partition (empname='ravi') set location '$location'""")
@@ -369,7 +369,7 @@ test("Creation of partition table should fail if the colname in table schema and
         | PARTITIONED BY (empname String)
         | STORED BY 'org.apache.carbondata.format'
       """.stripMargin)
-    val location = metastoredb +"/" +"ravi"
+    val location = metaStoreDB +"/" +"ravi"
     sql(s"""alter table staticpartitionlocloadother add partition (empname='ravi') location '$location'""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE staticpartitionlocloadother partition(empname='ravi') OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE staticpartitionlocloadother partition(empname='indra') OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
@@ -405,7 +405,7 @@ test("Creation of partition table should fail if the colname in table schema and
         | PARTITIONED BY (empname String)
         | STORED BY 'org.apache.carbondata.format'
       """.stripMargin)
-    val location = metastoredb +"/" +"ravi1"
+    val location = metaStoreDB +"/" +"ravi1"
     sql(s"""alter table staticpartitionlocloadother_new add partition (empname='ravi') location '$location'""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE staticpartitionlocloadother_new partition(empname='ravi') OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE staticpartitionlocloadother_new partition(empname='indra') OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
index 45d472e..da9d4c2 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
@@ -192,9 +192,9 @@ object CarbonStore {
       }
     } finally {
       if (currentTablePartitions.equals(None)) {
-        cleanUpPartitionFoldersRecurssively(carbonTable, List.empty[PartitionSpec])
+        cleanUpPartitionFoldersRecursively(carbonTable, List.empty[PartitionSpec])
       } else {
-        cleanUpPartitionFoldersRecurssively(carbonTable, currentTablePartitions.get.toList)
+        cleanUpPartitionFoldersRecursively(carbonTable, currentTablePartitions.get.toList)
       }
 
       if (carbonCleanFilesLock != null) {
@@ -204,12 +204,12 @@ object CarbonStore {
   }
 
   /**
-   * delete partition folders recurssively
+   * delete partition folders recursively
    *
    * @param carbonTable
    * @param partitionSpecList
    */
-  def cleanUpPartitionFoldersRecurssively(carbonTable: CarbonTable,
+  def cleanUpPartitionFoldersRecursively(carbonTable: CarbonTable,
       partitionSpecList: List[PartitionSpec]): Unit = {
     if (carbonTable != null) {
       val loadMetadataDetails = SegmentStatusManager

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
index f69a142..0af832b 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
@@ -76,7 +76,7 @@ object TestQueryExecutor {
     // Otherwise point to respective target folder location
     localTarget
   }
-  val metastoredb = target
+  val metaStoreDB = target
   val location = s"$target/dbpath"
   val masterUrl = {
     val property = System.getProperty("spark.master.url")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
index 5a26dd5..411d5a3 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
@@ -118,7 +118,7 @@ class QueryTest extends PlanTest {
   lazy val storeLocation = CarbonProperties.getInstance().
     getProperty(CarbonCommonConstants.STORE_LOCATION)
   val resourcesPath = TestQueryExecutor.resourcesPath
-  val metastoredb = TestQueryExecutor.metastoredb
+  val metaStoreDB = TestQueryExecutor.metaStoreDB
   val integrationPath = TestQueryExecutor.integrationPath
   val dblocation = TestQueryExecutor.location
   val defaultParallelism = sqlContext.sparkContext.defaultParallelism

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
index 994ec43..672e972 100644
--- a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
+++ b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
@@ -41,7 +41,7 @@ object TestUtil {
                           + "../../../..").getCanonicalPath
   val warehouse1 = FileFactory.getPath(s"$rootPath/integration/spark-datasource/target/warehouse").toString
   val resource = s"$rootPath/integration/spark-datasource/src/test/resources"
-  val metastoredb1 = s"$rootPath/integration/spark-datasource/target"
+  val metaStoreDB1 = s"$rootPath/integration/spark-datasource/target"
   val spark = SparkSession
     .builder()
     .enableHiveSupport()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
index 0f68004..a88a02b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
@@ -283,7 +283,7 @@ object DeleteExecution {
         } catch {
           case e : MultipleMatchingException =>
             LOGGER.error(e.getMessage)
-          // dont throw exception here.
+          // don't throw exception here.
           case e: Exception =>
             val errorMsg = s"Delete data operation is failed for ${ database }.${ tableName }."
             LOGGER.error(errorMsg + e.getMessage)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
index b2d21cc..eb98264 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
@@ -257,7 +257,7 @@ object AlterTableDropPartitionMetaListener extends OperationEventListener{
     if (parentCarbonTable.hasAggregationDataMap) {
       // used as a flag to block direct drop partition on aggregate tables fired by the user
       operationContext.setProperty("isInternalDropCall", "true")
-      // Filter out all the tables which dont have the partition being dropped.
+      // Filter out all the tables which don't have the partition being dropped.
       val childTablesWithoutPartitionColumns =
         parentCarbonTable.getTableInfo.getDataMapSchemaList.asScala.filter { dataMapSchema =>
           val childColumns = dataMapSchema.getChildSchema.getListOfColumns.asScala

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
index a2c4bd0..da8e48b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
@@ -366,18 +366,18 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
       var newProjectList: Seq[Attribute] = Seq.empty
       // In case of implicit exist we should disable vectorPushRowFilters as it goes in IUD flow
       // to get the positionId or tupleID
-      var implictsExisted = false
+      var implicitExisted = false
       val updatedProjects = projects.map {
           case a@Alias(s: ScalaUDF, name)
             if name.equalsIgnoreCase(CarbonCommonConstants.POSITION_ID) ||
                 name.equalsIgnoreCase(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID) =>
             val reference = AttributeReference(name, StringType, true)().withExprId(a.exprId)
             newProjectList :+= reference
-            implictsExisted = true
+            implicitExisted = true
             reference
           case a@Alias(s: ScalaUDF, name)
             if name.equalsIgnoreCase(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_SEGMENTID) =>
-            implictsExisted = true
+            implicitExisted = true
             val reference =
               AttributeReference(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID,
                 StringType, true)().withExprId(a.exprId)
@@ -393,7 +393,7 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
         getRequestedColumns(relation, projectsAttr, filterSet, handledSet, newProjectList)
 
       var updateRequestedColumns =
-        if (!vectorPushRowFilters && !implictsExisted && !hasDictionaryFilterCols
+        if (!vectorPushRowFilters && !implicitExisted && !hasDictionaryFilterCols
             && !hasMoreDictionaryCols) {
           updateRequestedColumnsFunc(
             (projectSet ++ filterSet).map(relation.attributeMap).toSeq,
@@ -406,7 +406,7 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
         supportBatchedDataSource(relation.relation.sqlContext,
           updateRequestedColumns.asInstanceOf[Seq[Attribute]]) &&
         needDecoder.isEmpty
-      if (!vectorPushRowFilters && !supportBatch && !implictsExisted && !hasDictionaryFilterCols
+      if (!vectorPushRowFilters && !supportBatch && !implicitExisted && !hasDictionaryFilterCols
           && !hasMoreDictionaryCols) {
         // revert for row scan
         updateRequestedColumns = updateRequestedColumnsFunc(requestedColumns, table, needDecoder)
@@ -423,7 +423,7 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
         updateRequestedColumns.asInstanceOf[Seq[Attribute]])
       // Check whether spark should handle row filters in case of vector flow.
       if (!vectorPushRowFilters && scan.isInstanceOf[CarbonDataSourceScan]
-          && !implictsExisted && !hasDictionaryFilterCols && !hasMoreDictionaryCols) {
+          && !implicitExisted && !hasDictionaryFilterCols && !hasMoreDictionaryCols) {
         // Here carbon only do page pruning and row level pruning will be done by spark.
         scan.inputRDDs().head match {
           case rdd: CarbonScanRDD[InternalRow] =>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
index b341d6a..eaef9c1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
@@ -59,7 +59,7 @@ object Spark2TestQueryExecutor {
     FileFactory.getConfiguration.
       set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER")
   }
-  val metastoredb = s"$integrationPath/spark-common-cluster-test/target"
+  val metaStoreDB = s"$integrationPath/spark-common-cluster-test/target"
   val spark = SparkSession
     .builder().config(conf)
     .master(TestQueryExecutor.masterUrl)
@@ -67,7 +67,7 @@ object Spark2TestQueryExecutor {
     .enableHiveSupport()
     .config("spark.sql.warehouse.dir", warehouse)
     .config("spark.sql.crossJoin.enabled", "true")
-    .getOrCreateCarbonSession(null, TestQueryExecutor.metastoredb)
+    .getOrCreateCarbonSession(null, TestQueryExecutor.metaStoreDB)
   if (warehouse.startsWith("hdfs://")) {
     System.setProperty(CarbonCommonConstants.HDFS_TEMP_LOCATION, warehouse)
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOCK_TYPE,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala
index 58e5665..c7f080d 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala
@@ -122,7 +122,7 @@ class AllDictionaryTestCase extends Spark2QueryTest with BeforeAndAfterAll {
   def buildRelation() = {
     val warehouse = s"$resourcesPath/target/warehouse"
     val storeLocation = s"$resourcesPath/target/store"
-    val metastoredb = s"$resourcesPath/target"
+    val metaStoreDB = s"$resourcesPath/target"
     CarbonProperties.getInstance()
       .addProperty("carbon.custom.distribution", "true")
     CarbonProperties.getInstance()
@@ -137,7 +137,7 @@ class AllDictionaryTestCase extends Spark2QueryTest with BeforeAndAfterAll {
       .config("spark.network.timeout", "600s")
       .config("spark.executor.heartbeatInterval", "600s")
       .config("carbon.enable.vector.reader","false")
-      .getOrCreateCarbonSession(storeLocation, metastoredb)
+      .getOrCreateCarbonSession(storeLocation, metaStoreDB)
     val catalog = CarbonEnv.getInstance(spark).carbonMetaStore
     sampleRelation = catalog.lookupRelation(Option(CarbonCommonConstants.DATABASE_DEFAULT_NAME),
       "sample")(spark).asInstanceOf[CarbonRelation]

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
index 9607bbc..e4dca67 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
@@ -116,7 +116,7 @@ class ExternalColumnDictionaryTestCase extends Spark2QueryTest with BeforeAndAft
   def buildRelation() = {
     val warehouse = s"$resourcesPath/target/warehouse"
     val storeLocation = s"$resourcesPath/target/store"
-    val metastoredb = s"$resourcesPath/target"
+    val metaStoreDB = s"$resourcesPath/target"
     CarbonProperties.getInstance()
       .addProperty("carbon.custom.distribution", "true")
     CarbonProperties.getInstance()
@@ -131,7 +131,7 @@ class ExternalColumnDictionaryTestCase extends Spark2QueryTest with BeforeAndAft
       .config("spark.network.timeout", "600s")
       .config("spark.executor.heartbeatInterval", "600s")
       .config("carbon.enable.vector.reader","false")
-      .getOrCreateCarbonSession(storeLocation, metastoredb)
+      .getOrCreateCarbonSession(storeLocation, metaStoreDB)
     val catalog = CarbonEnv.getInstance(spark).carbonMetaStore
     extComplexRelation = catalog
       .lookupRelation(Option(CarbonCommonConstants.DATABASE_DEFAULT_NAME),

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java b/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java
index 31f2234..d912a25 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java
@@ -190,7 +190,7 @@ public class StructDataType implements GenericDataType<StructObject> {
         children.get(i).writeByteArray(data[i], dataOutputStream, logHolder);
       }
 
-      // For other children elements which dont have data, write empty
+      // For other children elements which don't have data, write empty
       for (int i = data.length; i < children.size(); i++) {
         children.get(i).writeByteArray(null, dataOutputStream, logHolder);
       }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3e4638b3/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
index d02348d..51a7b3a 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
@@ -161,11 +161,11 @@ public class CarbonLoadModelBuilder {
     String global_sort_partitions = optionsFinal.get("global_sort_partitions");
     String timestampformat = optionsFinal.get("timestampformat");
     String dateFormat = optionsFinal.get("dateformat");
-    String delimeter = optionsFinal.get("delimiter");
-    String complex_delimeter_level1 = optionsFinal.get("complex_delimiter_level_1");
-    String complex_delimeter_level2 = optionsFinal.get("complex_delimiter_level_2");
-    String complex_delimeter_level3 = optionsFinal.get("complex_delimiter_level_3");
-    String complex_delimeter_level4 = optionsFinal.get("complex_delimiter_level_4");
+    String delimiter = optionsFinal.get("delimiter");
+    String complex_delimiter_level1 = optionsFinal.get("complex_delimiter_level_1");
+    String complex_delimiter_level2 = optionsFinal.get("complex_delimiter_level_2");
+    String complex_delimiter_level3 = optionsFinal.get("complex_delimiter_level_3");
+    String complex_delimiter_level4 = optionsFinal.get("complex_delimiter_level_4");
     String all_dictionary_path = optionsFinal.get("all_dictionary_path");
     String column_dict = optionsFinal.get("columndict");
     validateDateTimeFormat(timestampformat, "TimestampFormat");
@@ -257,20 +257,20 @@ public class CarbonLoadModelBuilder {
     carbonLoadModel.setGlobalSortPartitions(global_sort_partitions);
     carbonLoadModel.setUseOnePass(Boolean.parseBoolean(single_pass));
 
-    if (delimeter.equalsIgnoreCase(complex_delimeter_level1) ||
-        complex_delimeter_level1.equalsIgnoreCase(complex_delimeter_level2) ||
-        delimeter.equalsIgnoreCase(complex_delimeter_level2) ||
-        delimeter.equalsIgnoreCase(complex_delimeter_level3)) {
+    if (delimiter.equalsIgnoreCase(complex_delimiter_level1) ||
+        complex_delimiter_level1.equalsIgnoreCase(complex_delimiter_level2) ||
+        delimiter.equalsIgnoreCase(complex_delimiter_level2) ||
+        delimiter.equalsIgnoreCase(complex_delimiter_level3)) {
       throw new InvalidLoadOptionException("Field Delimiter and Complex types delimiter are same");
     } else {
-      carbonLoadModel.setComplexDelimiter(complex_delimeter_level1);
-      carbonLoadModel.setComplexDelimiter(complex_delimeter_level2);
-      carbonLoadModel.setComplexDelimiter(complex_delimeter_level3);
-      carbonLoadModel.setComplexDelimiter(complex_delimeter_level4);
+      carbonLoadModel.setComplexDelimiter(complex_delimiter_level1);
+      carbonLoadModel.setComplexDelimiter(complex_delimiter_level2);
+      carbonLoadModel.setComplexDelimiter(complex_delimiter_level3);
+      carbonLoadModel.setComplexDelimiter(complex_delimiter_level4);
     }
     // set local dictionary path, and dictionary file extension
     carbonLoadModel.setAllDictPath(all_dictionary_path);
-    carbonLoadModel.setCsvDelimiter(CarbonUtil.unescapeChar(delimeter));
+    carbonLoadModel.setCsvDelimiter(CarbonUtil.unescapeChar(delimiter));
     carbonLoadModel.setCsvHeader(fileHeader);
     carbonLoadModel.setColDictFilePath(column_dict);