Posted to commits@carbondata.apache.org by ch...@apache.org on 2018/12/15 03:15:29 UTC

[1/2] carbondata git commit: [CARBONDATA-3002] Fix some spelling errors

Repository: carbondata
Updated Branches:
  refs/heads/master 90f63a0cc -> 3ff61cacc


http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala b/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
index b6667df..efaa191 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
@@ -82,7 +82,7 @@ object TableLoader {
 
     val spark = TableAPIUtil.spark(storePath, s"TableLoader: $dbName.$tableName")
 
-    CarbonEnv.getInstance(spark).carbonMetastore.
+    CarbonEnv.getInstance(spark).carbonMetaStore.
       checkSchemasModifiedTimeAndReloadTable(TableIdentifier(tableName, Some(dbName)))
     loadTable(spark, Option(dbName), tableName, inputPaths, map)
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala b/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
index 759539b..47b6b67 100644
--- a/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
+++ b/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
@@ -164,10 +164,10 @@ class CarbonHiveSessionCatalog(
       carbonDatasourceHadoopRelation: CarbonDatasourceHadoopRelation): Boolean = {
     var isRefreshed = false
     val storePath = CarbonProperties.getStorePath
-    carbonEnv.carbonMetastore.
+    carbonEnv.carbonMetaStore.
       checkSchemasModifiedTimeAndReloadTable(identifier)
 
-    val table = carbonEnv.carbonMetastore.getTableFromMetadataCache(
+    val table = carbonEnv.carbonMetaStore.getTableFromMetadataCache(
       carbonDatasourceHadoopRelation.carbonTable.getDatabaseName,
       carbonDatasourceHadoopRelation.carbonTable.getTableName)
     if (table.isEmpty || (table.isDefined &&

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala
index ed5486b..bceb0fc 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/AllDictionaryTestCase.scala
@@ -137,7 +137,7 @@ class AllDictionaryTestCase extends Spark2QueryTest with BeforeAndAfterAll {
       .config("spark.executor.heartbeatInterval", "600s")
       .config("carbon.enable.vector.reader","false")
       .getOrCreateCarbonSession(storeLocation, metastoredb)
-    val catalog = CarbonEnv.getInstance(spark).carbonMetastore
+    val catalog = CarbonEnv.getInstance(spark).carbonMetaStore
     sampleRelation = catalog.lookupRelation(Option(CarbonCommonConstants.DATABASE_DEFAULT_NAME),
       "sample")(spark).asInstanceOf[CarbonRelation]
     complexRelation = catalog.lookupRelation(Option(CarbonCommonConstants.DATABASE_DEFAULT_NAME),

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala
index 245ee7c..3c50a18 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/DictionaryLRUCacheTestCase.scala
@@ -39,7 +39,7 @@ class DictionaryLRUCacheTestCase extends Spark2QueryTest with BeforeAndAfterAll
   var path : String = null
 
   def checkDictionaryAccessCount(databaseName: String, tableName: String): Unit = {
-    val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+    val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
       .lookupRelation(Option(databaseName), tableName)(Spark2TestQueryExecutor.spark)
       .asInstanceOf[CarbonRelation].carbonTable
     val absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
index 69248d6..9607bbc 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
@@ -132,7 +132,7 @@ class ExternalColumnDictionaryTestCase extends Spark2QueryTest with BeforeAndAft
       .config("spark.executor.heartbeatInterval", "600s")
       .config("carbon.enable.vector.reader","false")
       .getOrCreateCarbonSession(storeLocation, metastoredb)
-    val catalog = CarbonEnv.getInstance(spark).carbonMetastore
+    val catalog = CarbonEnv.getInstance(spark).carbonMetaStore
     extComplexRelation = catalog
       .lookupRelation(Option(CarbonCommonConstants.DATABASE_DEFAULT_NAME),
         "extComplextypes")(spark)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOpName.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOpName.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOpName.scala
index e3c2d88..5096089 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOpName.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOpName.scala
@@ -226,7 +226,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
     assertResult("Streaming property value is incorrect")(msg.getMessage)
 
     val identifier = new TableIdentifier("batch_table", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     var server: ServerSocket = null
     try {
@@ -253,7 +253,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
   // input source: file
   test("streaming ingest from file source") {
     val identifier = new TableIdentifier("stream_table_file", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdata").getCanonicalPath
     // streaming ingest 10 rows
@@ -277,7 +277,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
 
   test("test preaggregate table creation on streaming table without handoff") {
     val identifier = new TableIdentifier("agg_table", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdatanew").getCanonicalPath
     // streaming ingest 10 rows
@@ -297,7 +297,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
   test("test if data is loaded into preaggregate after handoff is fired") {
     createTable(tableName = "agg_table2", streaming = true, withBatchLoad = false)
     val identifier = new TableIdentifier("agg_table2", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdatanew").getCanonicalPath
     // streaming ingest 10 rows
@@ -358,7 +358,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
   test("test whether data is loaded into preaggregate after handoff is fired") {
     createTable(tableName = "agg_table2", streaming = true, withBatchLoad = false)
     val identifier = new TableIdentifier("agg_table2", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdatanew").getCanonicalPath
     // streaming ingest 10 rows
@@ -401,7 +401,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
   test("test whether data is loaded into preaggregate before handoff is fired") {
     createTable(tableName = "agg_table2", streaming = true, withBatchLoad = false)
     val identifier = new TableIdentifier("agg_table2", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdatanew").getCanonicalPath
     // streaming ingest 10 rows
@@ -438,7 +438,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists timeseries_table")
     createTable(tableName = "timeseries_table", streaming = true, withBatchLoad = false)
     val identifier = new TableIdentifier("timeseries_table", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdatanew").getCanonicalPath
     val thread = createFileStreamingThread(spark, carbonTable, csvDataDir, intervalSecond = 1,
@@ -465,7 +465,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists timeseries_table")
     createTable(tableName = "timeseries_table", streaming = true, withBatchLoad = false)
     val identifier = new TableIdentifier("timeseries_table", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdatanew").getCanonicalPath
     val thread = createFileStreamingThread(spark, carbonTable, csvDataDir, intervalSecond = 1,
@@ -549,7 +549,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
 
   def loadData() {
     val identifier = new TableIdentifier("agg_table2", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdatanew").getCanonicalPath
     // streaming ingest 10 rows
@@ -567,7 +567,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists agg_table2")
     createTable(tableName = "agg_table2", streaming = true, withBatchLoad = false)
     val identifier = new TableIdentifier("agg_table2", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdata1").getCanonicalPath
     generateCSVDataFile(spark, idStart = 10, rowNums = 5, csvDataDir)
@@ -598,7 +598,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
   test("test if data is loaded in aggregate table after handoff is done for streaming table") {
     createTable(tableName = "agg_table3", streaming = true, withBatchLoad = false)
     val identifier = new TableIdentifier("agg_table3", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     val csvDataDir = new File("target/csvdatanew").getCanonicalPath
     generateCSVDataFile(spark, idStart = 10, rowNums = 5, csvDataDir)
@@ -1621,7 +1621,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
 
   test("block drop streaming table while streaming is in progress") {
     val identifier = new TableIdentifier("stream_table_drop", Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     var server: ServerSocket = null
     try {
@@ -2420,7 +2420,7 @@ class TestStreamingTableOpName extends QueryTest with BeforeAndAfterAll {
       badRecordsPath: String = CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL
   ): Unit = {
     val identifier = new TableIdentifier(tableName, Option("streaming"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     var server: ServerSocket = null
     try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableWithRowParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableWithRowParser.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableWithRowParser.scala
index 21cad07..5d806a3 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableWithRowParser.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableWithRowParser.scala
@@ -816,7 +816,7 @@ class TestStreamingTableWithRowParser extends QueryTest with BeforeAndAfterAll {
       autoHandoff: Boolean = CarbonCommonConstants.ENABLE_AUTO_HANDOFF_DEFAULT.toBoolean
   ): Unit = {
     val identifier = new TableIdentifier(tableName, Option("streaming1"))
-    val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
+    val carbonTable = CarbonEnv.getInstance(spark).carbonMetaStore.lookupRelation(identifier)(spark)
       .asInstanceOf[CarbonRelation].metaData.carbonTable
     var server: ServerSocket = null
     try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
index a0c801a..e4e7d92 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
@@ -67,7 +67,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'a',1,'aa','aaa'")
     backUpData(dblocation, "carbontable")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       sql("refresh table carbontable")
       checkAnswer(sql("select count(*) from carbontable"), Row(1))
@@ -83,7 +83,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'a',1,'aa','aaa'")
     backUpData(dblocation, "carbontable")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       sql("refresh table carbontable")
       checkAnswer(sql("select count(*) from carbontable"), Row(1))
@@ -103,7 +103,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     backUpData(dblocation, "carbontable")
     backUpData(dblocation, "carbontable_preagg1")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       restoreData(dblocation, "carbontable_preagg1")
       sql("refresh table carbontable")
@@ -126,7 +126,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     backUpData(dblocation, "carbontable")
     backUpData(dblocation, "carbontable_preagg1")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       restoreData(dblocation, "carbontable_preagg1")
       sql("refresh table carbontable")
@@ -149,7 +149,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     backUpData(dblocation, "carbontable")
     backUpData(dblocation, "carbontable_preagg1")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       intercept[ProcessMetaDataException] {
         sql("refresh table carbontable")
@@ -169,7 +169,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'b',1,'bb','bbb'")
     backUpData(dblocation, "carbontable")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       sql("refresh table carbontable")
       // update operation
@@ -195,7 +195,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql(s"LOAD DATA LOCAL INPATH '$testData' into table automerge")
     backUpData(dblocation, "automerge")
     sql("drop table automerge")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "automerge")
       sql("refresh table automerge")
       // update operation
@@ -216,7 +216,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'b',1,'bb','bbb'")
     backUpData(dblocation, "carbontable")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       sql("refresh table carbontable")
       // delete operation
@@ -238,7 +238,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'b',1,'bb','bbb'")
     backUpData(dblocation, "carbontable")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       sql("refresh table carbontable")
       sql("Alter table carbontable add columns(c4 string) " +
@@ -260,7 +260,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'b',1,'bb','bbb'")
     backUpData(dblocation, "carbontable")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       sql("refresh table carbontable")
       sql("Alter table carbontable change c2 c2 long")
@@ -281,7 +281,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'b',1,'bb','bbb'")
     backUpData(dblocation, "carbontable")
     sql("drop table carbontable")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "carbontable")
       sql("refresh table carbontable")
       sql("Alter table carbontable drop columns(c2)")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/processing/src/main/java/org/apache/carbondata/processing/loading/CarbonDataLoadConfiguration.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/CarbonDataLoadConfiguration.java b/processing/src/main/java/org/apache/carbondata/processing/loading/CarbonDataLoadConfiguration.java
index d3501c7..54dc2d4 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/CarbonDataLoadConfiguration.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/CarbonDataLoadConfiguration.java
@@ -116,7 +116,7 @@ public class CarbonDataLoadConfiguration {
   private boolean carbonTransactionalTable;
 
   /**
-   * Flder path to where data should be written for this load.
+   * Folder path to where data should be written for this load.
    */
   private String dataWritePath;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
index aecc52e..71d61db 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
@@ -217,7 +217,7 @@ public class CarbonLoadModel implements Serializable {
   private boolean isJsonFileLoad;
 
   /**
-   * Flder path to where data should be written for this load.
+   * Folder path to where data should be written for this load.
    */
   private String dataWritePath;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java
index 64fcaa2..7688415 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonLoaderUtil.java
@@ -551,14 +551,14 @@ public final class CarbonLoaderUtil {
    * This method will divide the blocks among the nodes as per the data locality
    *
    * @param blockInfos blocks
-   * @param noOfNodesInput -1 if number of nodes has to be decided
+   * @param numOfNodesInput -1 if number of nodes has to be decided
    *                       based on block location information
    * @param blockAssignmentStrategy strategy used to assign blocks
    * @param expectedMinSizePerNode the property load_min_size_inmb specified by the user
    * @return a map that maps node to blocks
    */
   public static Map<String, List<Distributable>> nodeBlockMapping(
-      List<Distributable> blockInfos, int noOfNodesInput, List<String> activeNodes,
+      List<Distributable> blockInfos, int numOfNodesInput, List<String> activeNodes,
       BlockAssignmentStrategy blockAssignmentStrategy, String expectedMinSizePerNode) {
     ArrayList<NodeMultiBlockRelation> rtnNode2Blocks = new ArrayList<>();
 
@@ -569,9 +569,9 @@ public final class CarbonLoaderUtil {
       nodes.add(relation.getNode());
     }
 
-    int noofNodes = (-1 == noOfNodesInput) ? nodes.size() : noOfNodesInput;
+    int numOfNodes = (-1 == numOfNodesInput) ? nodes.size() : numOfNodesInput;
     if (null != activeNodes) {
-      noofNodes = activeNodes.size();
+      numOfNodes = activeNodes.size();
     }
 
     // calculate the average expected size for each node
@@ -579,7 +579,7 @@ public final class CarbonLoaderUtil {
     long totalFileSize = 0;
     if (BlockAssignmentStrategy.BLOCK_NUM_FIRST == blockAssignmentStrategy) {
       if (blockInfos.size() > 0) {
-        sizePerNode = blockInfos.size() / noofNodes;
+        sizePerNode = blockInfos.size() / numOfNodes;
       }
       sizePerNode = sizePerNode <= 0 ? 1 : sizePerNode;
     } else if (BlockAssignmentStrategy.BLOCK_SIZE_FIRST == blockAssignmentStrategy
@@ -587,7 +587,7 @@ public final class CarbonLoaderUtil {
       for (Distributable blockInfo : uniqueBlocks) {
         totalFileSize += ((TableBlockInfo) blockInfo).getBlockLength();
       }
-      sizePerNode = totalFileSize / noofNodes;
+      sizePerNode = totalFileSize / numOfNodes;
     }
 
     // if enable to control the minimum amount of input data for each node
@@ -1152,7 +1152,7 @@ public final class CarbonLoaderUtil {
    * @return
    * @throws IOException
    */
-  public static String mergeIndexFilesinPartitionedSegment(CarbonTable table, String segmentId,
+  public static String mergeIndexFilesInPartitionedSegment(CarbonTable table, String segmentId,
       String uuid) throws IOException {
     String tablePath = table.getTablePath();
     return new CarbonIndexFileMergeWriter(table)
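
For context on the renamed noOfNodesInput parameter: the nodeBlockMapping hunks above derive a per-node quota that drives block assignment, either blocks-per-node or bytes-per-node depending on the strategy. A minimal standalone sketch of that calculation, assuming a plain list of block sizes in place of Distributable/TableBlockInfo (the enum and parameter names below are illustrative stand-ins, not the project's API):

import java.util.List;

public class NodeQuotaSketch {

  enum BlockAssignmentStrategy { BLOCK_NUM_FIRST, BLOCK_SIZE_FIRST }

  // Mirrors the hunks above: -1 means "derive the node count from block
  // locality"; a non-null active-node list overrides both. Assumes the
  // resulting node count is positive, as the original code does.
  static long sizePerNode(List<Long> blockSizes, int numOfNodesInput,
      int nodesFromLocality, List<String> activeNodes,
      BlockAssignmentStrategy strategy) {
    int numOfNodes = (-1 == numOfNodesInput) ? nodesFromLocality : numOfNodesInput;
    if (null != activeNodes) {
      numOfNodes = activeNodes.size();
    }
    if (BlockAssignmentStrategy.BLOCK_NUM_FIRST == strategy) {
      long sizePerNode = 0;
      if (blockSizes.size() > 0) {
        sizePerNode = blockSizes.size() / numOfNodes;  // blocks per node
      }
      return sizePerNode <= 0 ? 1 : sizePerNode;       // at least one block each
    }
    long totalFileSize = 0;
    for (long blockLength : blockSizes) {              // bytes per node
      totalFileSize += blockLength;
    }
    return totalFileSize / numOfNodes;
  }
}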


[2/2] carbondata git commit: [CARBONDATA-3002] Fix some spelling errors

Posted by ch...@apache.org.
[CARBONDATA-3002] Fix some spelling errors

Fix some spelling errors

This closes #2890


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/3ff61cac
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/3ff61cac
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/3ff61cac

Branch: refs/heads/master
Commit: 3ff61cacc774c433a82f30fdfc18437df2863cbe
Parents: 90f63a0
Author: xubo245 <xu...@huawei.com>
Authored: Thu Nov 1 11:16:43 2018 +0800
Committer: chenliang613 <ch...@huawei.com>
Committed: Sat Dec 15 11:15:18 2018 +0800

----------------------------------------------------------------------
 .../carbondata/core/datastore/FileReader.java   |  6 ++--
 .../core/datastore/impl/FileReaderImpl.java     |  8 ++---
 .../carbondata/core/locks/CarbonLockUtil.java   |  4 +--
 .../AbstractDetailQueryResultIterator.java      |  8 ++---
 .../carbondata/core/util/CarbonProperties.java  |  8 ++---
 .../sdv/register/TestRegisterCarbonTable.scala  | 20 +++++------
 ...ithColumnMetCacheAndCacheLevelProperty.scala |  4 +--
 .../createTable/TestCreateTableAsSelect.scala   |  2 +-
 .../DBLocationCarbonTableTestCase.scala         |  4 +--
 .../iud/DeleteCarbonTableTestCase.scala         |  6 ++--
 .../StandardPartitionTableLoadingTestCase.scala |  2 +-
 .../apache/spark/rdd/CarbonMergeFilesRDD.scala  |  2 +-
 .../spark/rdd/CarbonDataRDDFactory.scala        |  4 +--
 .../sql/CarbonDatasourceHadoopRelation.scala    |  2 +-
 .../scala/org/apache/spark/sql/CarbonEnv.scala  | 12 +++----
 .../org/apache/spark/sql/CarbonSource.scala     |  8 ++---
 .../datamap/CarbonDropDataMapCommand.scala      |  2 +-
 .../CarbonAlterTableCompactionCommand.scala     |  2 +-
 .../management/CarbonLoadDataCommand.scala      |  2 +-
 .../management/RefreshCarbonTableCommand.scala  |  2 +-
 .../spark/sql/execution/command/package.scala   |  2 +-
 .../CarbonAlterTableDropPartitionCommand.scala  |  2 +-
 .../CarbonAlterTableSplitPartitionCommand.scala |  2 +-
 .../CarbonShowCarbonPartitionsCommand.scala     |  2 +-
 .../preaaggregate/PreAggregateUtil.scala        |  8 ++---
 .../CarbonAlterTableAddColumnCommand.scala      |  2 +-
 .../CarbonAlterTableDataTypeChangeCommand.scala |  2 +-
 .../CarbonAlterTableDropColumnCommand.scala     |  2 +-
 .../schema/CarbonAlterTableRenameCommand.scala  |  2 +-
 .../CarbonCreateTableAsSelectCommand.scala      |  2 +-
 .../table/CarbonCreateTableCommand.scala        |  4 +--
 .../table/CarbonDescribeFormattedCommand.scala  |  2 +-
 .../command/table/CarbonDropTableCommand.scala  |  2 +-
 .../sql/execution/strategy/DDLStrategy.scala    | 36 ++++++++++----------
 .../spark/sql/hive/CarbonFileMetastore.scala    |  2 +-
 .../sql/hive/CarbonPreAggregateRules.scala      |  4 +--
 .../sql/parser/CarbonSparkSqlParserUtil.scala   |  2 +-
 .../org/apache/spark/util/AlterTableUtil.scala  | 16 ++++-----
 .../org/apache/spark/util/CleanFiles.scala      |  2 +-
 .../org/apache/spark/util/Compaction.scala      |  2 +-
 .../apache/spark/util/DeleteSegmentByDate.scala |  2 +-
 .../apache/spark/util/DeleteSegmentById.scala   |  2 +-
 .../org/apache/spark/util/TableAPIUtil.scala    |  2 +-
 .../org/apache/spark/util/TableLoader.scala     |  2 +-
 .../spark/sql/hive/CarbonSessionState.scala     |  4 +--
 .../spark/util/AllDictionaryTestCase.scala      |  2 +-
 .../spark/util/DictionaryLRUCacheTestCase.scala |  2 +-
 .../util/ExternalColumnDictionaryTestCase.scala |  2 +-
 .../carbondata/TestStreamingTableOpName.scala   | 26 +++++++-------
 .../TestStreamingTableWithRowParser.scala       |  2 +-
 .../register/TestRegisterCarbonTable.scala      | 22 ++++++------
 .../loading/CarbonDataLoadConfiguration.java    |  2 +-
 .../loading/model/CarbonLoadModel.java          |  2 +-
 .../processing/util/CarbonLoaderUtil.java       | 14 ++++----
 54 files changed, 146 insertions(+), 146 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java b/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java
index df0d745..2527f1d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/FileReader.java
@@ -56,7 +56,7 @@ public interface FileReader {
 
   /**
    * This method will be used to read int from file from postion(offset), here
-   * length will be always 4 bacause int byte size if 4
+   * length will be always 4 because int byte size if 4
    *
    * @param filePath fully qualified file path
    * @param offset   reading start position,
@@ -66,7 +66,7 @@ public interface FileReader {
 
   /**
    * This method will be used to read long from file from postion(offset), here
-   * length will be always 8 bacause int byte size is 8
+   * length will be always 8 because int byte size is 8
    *
    * @param filePath fully qualified file path
    * @param offset   reading start position,
@@ -76,7 +76,7 @@ public interface FileReader {
 
   /**
    * This method will be used to read int from file from postion(offset), here
-   * length will be always 4 bacause int byte size if 4
+   * length will be always 4 because int byte size if 4
    *
    * @param filePath fully qualified file path
    * @return read int
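
The javadoc above describes reading a fixed-width int at a byte offset, where the length is always 4 because an int is 4 bytes. A minimal sketch of that access pattern using plain java.nio (an assumption for illustration; CarbonData's FileReaderImpl layers stream caching and multiple file-system backends on top of this):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class ReadIntAtOffset {

  // Reads exactly 4 bytes starting at the given offset and decodes them
  // as a big-endian int (ByteBuffer's default byte order).
  static int readInt(Path filePath, long offset) throws IOException {
    try (FileChannel channel = FileChannel.open(filePath, StandardOpenOption.READ)) {
      ByteBuffer buffer = ByteBuffer.allocate(4);
      while (buffer.hasRemaining()) {
        // positional read: does not disturb the channel's own position
        if (channel.read(buffer, offset + buffer.position()) < 0) {
          throw new IOException("EOF before 4 bytes were read at offset " + offset);
        }
      }
      buffer.flip();
      return buffer.getInt();
    }
  }
}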

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java
index 0c1b2b0..f513cb9 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileReaderImpl.java
@@ -81,7 +81,7 @@ public class FileReaderImpl implements FileReader {
 
   /**
    * This method will be used to read int from file from postion(offset), here
-   * length will be always 4 bacause int byte size if 4
+   * length will be always 4 because int byte size if 4
    *
    * @param filePath fully qualified file path
    * @param offset   reading start position,
@@ -95,7 +95,7 @@ public class FileReaderImpl implements FileReader {
 
   /**
    * This method will be used to read int from file from postion(offset), here
-   * length will be always 4 bacause int byte size if 4
+   * length will be always 4 because int byte size if 4
    *
    * @param filePath fully qualified file path
    * @return read int
@@ -108,7 +108,7 @@ public class FileReaderImpl implements FileReader {
 
   /**
    * This method will be used to read int from file from postion(offset), here
-   * length will be always 4 bacause int byte size if 4
+   * length will be always 4 because int byte size if 4
    *
    * @param filePath fully qualified file path
    * @param offset   reading start position,
@@ -184,7 +184,7 @@ public class FileReaderImpl implements FileReader {
 
   /**
    * This method will be used to read long from file from postion(offset), here
-   * length will be always 8 bacause int byte size is 8
+   * length will be always 8 because int byte size is 8
    *
    * @param filePath fully qualified file path
    * @param offset   reading start position,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
index 89ccbd0..fbbf0c6 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockUtil.java
@@ -121,7 +121,7 @@ public class CarbonLockUtil {
    */
   public static void deleteExpiredSegmentLockFiles(CarbonTable carbonTable) {
     final long currTime = System.currentTimeMillis();
-    final long segmentLockFilesPreservTime =
+    final long segmentLockFilesPreserveTime =
         CarbonProperties.getInstance().getSegmentLockFilesPreserveHours();
     AbsoluteTableIdentifier absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier();
     String lockFilesDir = CarbonProperties.getInstance()
@@ -137,7 +137,7 @@ public class CarbonLockUtil {
 
             @Override public boolean accept(CarbonFile pathName) {
               if (CarbonTablePath.isSegmentLockFilePath(pathName.getName())) {
-                return (currTime - pathName.getLastModifiedTime()) > segmentLockFilesPreservTime;
+                return (currTime - pathName.getLastModifiedTime()) > segmentLockFilesPreserveTime;
               }
               return false;
             }
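
The renamed local above holds a preserve window, and segment lock files older than that window are selected for deletion. A minimal sketch of the same age filter over a plain java.io directory, assuming java.io.File in place of CarbonFile and a ".lock" suffix in place of the real isSegmentLockFilePath check (both are assumptions for illustration):

import java.io.File;
import java.util.concurrent.TimeUnit;

public class ExpiredLockFileSketch {

  // Returns files whose last-modified time lies outside the preserve
  // window; returns null if the directory does not exist.
  static File[] findExpired(File lockFilesDir, long preserveHours) {
    final long currTime = System.currentTimeMillis();
    final long preserveMillis = TimeUnit.HOURS.toMillis(preserveHours);
    return lockFilesDir.listFiles(pathName ->
        pathName.isFile()
            && pathName.getName().endsWith(".lock")  // assumed naming convention
            && (currTime - pathName.lastModified()) > preserveMillis);
  }
}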

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
index ed78aa7..9282d44 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
@@ -107,11 +107,11 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
         FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getTablePath()));
     this.fileReader.setReadPageByPage(queryModel.isReadPageByPage());
     this.execService = execService;
-    intialiseInfos();
+    initialiseInfos();
     initQueryStatiticsModel();
   }
 
-  private void intialiseInfos() {
+  private void initialiseInfos() {
     for (BlockExecutionInfo blockInfo : blockExecutionInfos) {
       Map<String, DeleteDeltaVo> deletedRowsMap = null;
       // if delete delta file is present
@@ -172,7 +172,7 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
           carbonDeleteDeltaFileReader = new CarbonDeleteFilesDataReader();
           Map<String, DeleteDeltaVo> deletedRowsMap = carbonDeleteDeltaFileReader
               .getDeletedRowsDataVo(deleteDeltaInfo.getDeleteDeltaFile());
-          setDeltedDeltaBoToDataBlock(deleteDeltaInfo, deletedRowsMap, dataBlock);
+          setDeletedDeltaBoToDataBlock(deleteDeltaInfo, deletedRowsMap, dataBlock);
           // remove the lock
           deleteDeltaToLockObjectMap.remove(deleteDeltaInfo);
           return deletedRowsMap;
@@ -193,7 +193,7 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
    * @param deletedRecordsMap
    * @param dataBlock
    */
-  private void setDeltedDeltaBoToDataBlock(DeleteDeltaInfo deleteDeltaInfo,
+  private void setDeletedDeltaBoToDataBlock(DeleteDeltaInfo deleteDeltaInfo,
       Map<String, DeleteDeltaVo> deletedRecordsMap, AbstractIndex dataBlock) {
     // check if timestamp of data block is less than the latest delete delta timestamp
     // then update the delete delta details and timestamp in data block

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index f4a75a8..93d622d 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -593,21 +593,21 @@ public final class CarbonProperties {
    * This method validates the number of column read in one IO
    */
   private void validateNumberOfColumnPerIORead() {
-    String numberofColumnPerIOString = carbonProperties
+    String numberOfColumnPerIOString = carbonProperties
         .getProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
             CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
     try {
-      short numberofColumnPerIO = Short.parseShort(numberofColumnPerIOString);
+      short numberofColumnPerIO = Short.parseShort(numberOfColumnPerIOString);
       if (numberofColumnPerIO < CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_MIN
           || numberofColumnPerIO > CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_MAX) {
-        LOGGER.info("The Number Of pages per blocklet column value \"" + numberofColumnPerIOString
+        LOGGER.info("The Number Of pages per blocklet column value \"" + numberOfColumnPerIOString
             + "\" is invalid. Using the default value \""
             + CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
         carbonProperties.setProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
             CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
       }
     } catch (NumberFormatException e) {
-      LOGGER.info("The Number Of pages per blocklet column value \"" + numberofColumnPerIOString
+      LOGGER.info("The Number Of pages per blocklet column value \"" + numberOfColumnPerIOString
           + "\" is invalid. Using the default value \""
           + CarbonV3DataFormatConstants.NUMBER_OF_COLUMN_TO_READ_IN_IO_DEFAULTVALUE);
       carbonProperties.setProperty(NUMBER_OF_COLUMN_TO_READ_IN_IO,
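
The hunk above renames the raw string holding the columns-per-IO setting; the surrounding method parses it and falls back to the default whenever the value is non-numeric or out of range. A minimal sketch of that validate-or-reset pattern over java.util.Properties (the key and bounds below are hard-coded placeholders, not the project's constants):

import java.util.Properties;

public class ValidateShortPropertySketch {

  // Parses a short-valued property and resets it to the default when the
  // value is non-numeric or outside [min, max], as the validator above does.
  static void validateShortInRange(Properties props, String key,
      short min, short max, String defaultValue) {
    String raw = props.getProperty(key, defaultValue);
    try {
      short value = Short.parseShort(raw);
      if (value < min || value > max) {
        props.setProperty(key, defaultValue);
      }
    } catch (NumberFormatException e) {
      props.setProperty(key, defaultValue);
    }
  }

  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty("number.of.column.to.read.in.io", "999");
    validateShortInRange(props, "number.of.column.to.read.in.io",
        (short) 1, (short) 20, "10");
    System.out.println(props.getProperty("number.of.column.to.read.in.io"));  // prints 10
  }
}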

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/register/TestRegisterCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/register/TestRegisterCarbonTable.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/register/TestRegisterCarbonTable.scala
index bf07bd6..caae8e1 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/register/TestRegisterCarbonTable.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/register/TestRegisterCarbonTable.scala
@@ -83,7 +83,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("use carbon")
     sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
     sql("insert into carbontable select 'a',1,'aa','aaa'")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       sql("drop table carbontable")
       restoreData(dbLocationCustom, "carbontable")
@@ -99,7 +99,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("use carbon")
     sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
     sql("insert into carbontable select 'a',1,'aa','aaa'")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       sql("drop table carbontable")
       restoreData(dbLocationCustom, "carbontable")
@@ -118,7 +118,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'b',1,'aa','aaa'")
     sql("insert into carbontable select 'a',10,'aa','aaa'")
     sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       backUpData(dbLocationCustom, "carbontable_preagg1")
       sql("drop table carbontable")
@@ -141,7 +141,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'b',1,'aa','aaa'")
     sql("insert into carbontable select 'a',10,'aa','aaa'")
     sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       backUpData(dbLocationCustom, "carbontable_preagg1")
       sql("drop table carbontable")
@@ -164,7 +164,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into carbontable select 'b',1,'aa','aaa'")
     sql("insert into carbontable select 'a',10,'aa','aaa'")
     sql("create datamap preagg1 on table carbontable using 'preaggregate' as select c1,sum(c2) from carbontable group by c1")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       backUpData(dbLocationCustom, "carbontable_preagg1")
       sql("drop table carbontable")
@@ -183,7 +183,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
     sql("insert into carbontable select 'a',1,'aa','aaa'")
     sql("insert into carbontable select 'b',1,'bb','bbb'")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       sql("drop table carbontable")
       restoreData(dbLocationCustom, "carbontable")
@@ -205,7 +205,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
     sql("insert into carbontable select 'a',1,'aa','aaa'")
     sql("insert into carbontable select 'b',1,'bb','bbb'")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       sql("drop table carbontable")
       restoreData(dbLocationCustom, "carbontable")
@@ -227,7 +227,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
     sql("insert into carbontable select 'a',1,'aa','aaa'")
     sql("insert into carbontable select 'b',1,'bb','bbb'")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       sql("drop table carbontable")
       restoreData(dbLocationCustom, "carbontable")
@@ -249,7 +249,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
     sql("insert into carbontable select 'a',1,'aa','aaa'")
     sql("insert into carbontable select 'b',1,'bb','bbb'")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       sql("drop table carbontable")
       restoreData(dbLocationCustom, "carbontable")
@@ -270,7 +270,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
     sql("insert into carbontable select 'a',1,'aa','aaa'")
     sql("insert into carbontable select 'b',1,'bb','bbb'")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       backUpData(dbLocationCustom, "carbontable")
       sql("drop table carbontable")
       restoreData(dbLocationCustom, "carbontable")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
index 1c54c48..7c9a9fc 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
@@ -75,7 +75,7 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty extends QueryTest with Be
       tableName: String,
       segmentId: String,
       isSchemaModified: Boolean = false): List[DataMap[_ <: Blocklet]] = {
-    val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
+    val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore
       .lookupRelation(Some(dbName), tableName)(sqlContext.sparkSession)
       .asInstanceOf[CarbonRelation]
     val carbonTable = relation.carbonTable
@@ -291,7 +291,7 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty extends QueryTest with Be
     sql("insert into minMaxSerialize select 'a','aa','aaa'")
     checkAnswer(sql("select * from minMaxSerialize where name='a'"), Row("a", "aa", "aaa"))
     checkAnswer(sql("select * from minMaxSerialize where name='b'"), Seq.empty)
-    val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
+    val relation: CarbonRelation = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore
       .lookupRelation(Some("default"), "minMaxSerialize")(sqlContext.sparkSession)
       .asInstanceOf[CarbonRelation]
     val carbonTable = relation.carbonTable

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
index c95e5a4..3896061 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
@@ -151,7 +151,7 @@ class TestCreateTableAsSelect extends QueryTest with BeforeAndAfterAll {
       "create table ctas_tblproperties_testt stored by 'carbondata' TBLPROPERTIES" +
         "('DICTIONARY_INCLUDE'='key', 'sort_scope'='global_sort') as select * from carbon_ctas_test")
     checkAnswer(sql("select * from ctas_tblproperties_testt"), sql("select * from carbon_ctas_test"))
-    val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+    val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
       .lookupRelation(Option("default"), "ctas_tblproperties_testt")(Spark2TestQueryExecutor.spark)
       .asInstanceOf[CarbonRelation].carbonTable
     val metadataFolderPath: CarbonFile = FileFactory.getCarbonFile(carbonTable.getMetadataPath)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala
index cb47cf5..7b80c72 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala
@@ -284,7 +284,7 @@ class DBLocationCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql("drop table carbontable")
     // perform file check
     assert(FileFactory.isFileExist(timestampFile, timestampFileType, true) ||
-           CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore)
+           CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore)
 
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_UPDATE_SYNC_FOLDER,
@@ -295,7 +295,7 @@ class DBLocationCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql("drop table carbontable")
     // perform file check
     assert(FileFactory.isFileExist(timestampFile, timestampFileType, true) ||
-           CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore)
+           CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore)
   }
 
   override def afterAll {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
index 39582f0..2f95133 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
@@ -208,7 +208,7 @@ class DeleteCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
       .getCarbonTable(Some("iud_db"), "update_status_files")(sqlContext.sparkSession)
     val metaPath = carbonTable.getMetadataPath
     val files = FileFactory.getCarbonFile(metaPath)
-    val result = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.getClass
+    val result = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.getClass
     if(result.getCanonicalName.contains("CarbonFileMetastore")) {
       assert(files.listFiles(new CarbonFileFilter {
         override def accept(file: CarbonFile): Boolean = !file.isDirectory
@@ -257,11 +257,11 @@ class DeleteCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     assert(
       listOfTupleId(4).contains("0/0/0-0_batchno0-0-0-") && listOfTupleId(4).endsWith("/0/0/4"))
 
-    val carbonTable_part = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+    val carbonTable_part = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
       .lookupRelation(Option("iud_db"), "dest_tuple_part")(Spark2TestQueryExecutor.spark)
       .asInstanceOf[CarbonRelation].carbonTable
 
-    val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetastore
+    val carbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
       .lookupRelation(Option("iud_db"), "dest_tuple")(Spark2TestQueryExecutor.spark)
       .asInstanceOf[CarbonRelation].carbonTable
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
index 9a0080c..059dd2b 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
@@ -406,7 +406,7 @@ class StandardPartitionTableLoadingTestCase extends QueryTest with BeforeAndAfte
     val dblocation = table.getTablePath.substring(0, table.getTablePath.lastIndexOf("/"))
     backUpData(dblocation, "restorepartition")
     sql("drop table restorepartition")
-    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.isReadFromHiveMetaStore) {
+    if (!CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.isReadFromHiveMetaStore) {
       restoreData(dblocation, "restorepartition")
       sql("refresh table restorepartition")
       checkAnswer(sql("select count(*) from restorepartition"), rows)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark-common/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala b/integration/spark-common/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala
index 3605dde..c101d02 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala
@@ -127,7 +127,7 @@ class CarbonMergeFilesRDD(
 
       if (isHivePartitionedTable) {
         CarbonLoaderUtil
-          .mergeIndexFilesinPartitionedSegment(carbonTable, split.segmentId,
+          .mergeIndexFilesInPartitionedSegment(carbonTable, split.segmentId,
             segmentFileNameToSegmentIdMap.get(split.segmentId))
       } else {
         new CarbonIndexFileMergeWriter(carbonTable)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index a849c99..34c6592 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -192,7 +192,7 @@ object CarbonDataRDDFactory {
             LOGGER.info("System level compaction lock is enabled.")
             val skipCompactionTables = ListBuffer[CarbonTableIdentifier]()
             var tableForCompaction = CarbonCompactionUtil.getNextTableToCompact(
-              CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
+              CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore
                 .listAllTables(sqlContext.sparkSession).toArray,
               skipCompactionTables.toList.asJava)
             while (null != tableForCompaction) {
@@ -247,7 +247,7 @@ object CarbonDataRDDFactory {
               }
               // ********* check again for all the tables.
               tableForCompaction = CarbonCompactionUtil.getNextTableToCompact(
-                CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore
+                CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore
                   .listAllTables(sqlContext.sparkSession).toArray,
                 skipCompactionTables.asJava)
             }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
index 04ec75d..f848ae1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
@@ -61,7 +61,7 @@ case class CarbonDatasourceHadoopRelation(
   CarbonSession.updateSessionInfoToCurrentThread(sparkSession)
 
   @transient lazy val carbonRelation: CarbonRelation =
-    CarbonEnv.getInstance(sparkSession).carbonMetastore.
+    CarbonEnv.getInstance(sparkSession).carbonMetaStore.
     createCarbonRelation(parameters, identifier, sparkSession)
 
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
index 90ba58c..e114e06 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
@@ -45,7 +45,7 @@ import org.apache.carbondata.spark.readsupport.SparkRowReadSupportImpl
  */
 class CarbonEnv {
 
-  var carbonMetastore: CarbonMetaStore = _
+  var carbonMetaStore: CarbonMetaStore = _
 
   var sessionParams: SessionParams = _
 
@@ -53,7 +53,7 @@ class CarbonEnv {
 
   private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
 
-  // set readsupport class global so that the executor can get it.
+  // set readSupport class global so that the executor can get it.
   SparkReadSupport.readSupportClass = classOf[SparkRowReadSupportImpl]
 
   var initialized = false
@@ -105,7 +105,7 @@ class CarbonEnv {
         }
         // add session params after adding DefaultCarbonParams
         config.addDefaultCarbonSessionParams()
-        carbonMetastore = {
+        carbonMetaStore = {
           // trigger event for CarbonEnv create
           val operationContext = new OperationContext
           val carbonEnvInitPreEvent: CarbonEnvInitPreEvent =
@@ -195,7 +195,7 @@ object CarbonEnv {
     (sparkSession: SparkSession): CarbonTable = {
     refreshRelationFromCache(TableIdentifier(tableName, databaseNameOp))(sparkSession)
     val databaseName = getDatabaseName(databaseNameOp)(sparkSession)
-    val catalog = getInstance(sparkSession).carbonMetastore
+    val catalog = getInstance(sparkSession).carbonMetaStore
     // refresh cache
     catalog.checkSchemasModifiedTimeAndReloadTable(TableIdentifier(tableName, databaseNameOp))
 
@@ -211,10 +211,10 @@ object CarbonEnv {
   def refreshRelationFromCache(identifier: TableIdentifier)(sparkSession: SparkSession): Boolean = {
     var isRefreshed = false
     val carbonEnv = getInstance(sparkSession)
-    val table = carbonEnv.carbonMetastore.getTableFromMetadataCache(
+    val table = carbonEnv.carbonMetaStore.getTableFromMetadataCache(
       identifier.database.getOrElse(sparkSession.sessionState.catalog.getCurrentDatabase),
       identifier.table)
-    if (carbonEnv.carbonMetastore
+    if (carbonEnv.carbonMetaStore
           .checkSchemasModifiedTimeAndReloadTable(identifier)  && table.isDefined) {
       sparkSession.sessionState.catalog.refreshTable(identifier)
       val tablePath = table.get.getTablePath
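
Because the field renamed at the top of this file's diff is a public var on CarbonEnv, the new spelling is source-incompatible for downstream code still written against carbonMetastore. A hypothetical deprecation shim (not part of this commit; the version string is illustrative) could keep old callers compiling for one release:

    // hypothetical compatibility aliases, not in this commit
    @deprecated("Use carbonMetaStore instead", "1.5.x")
    def carbonMetastore: CarbonMetaStore = carbonMetaStore
    @deprecated("Use carbonMetaStore instead", "1.5.x")
    def carbonMetastore_=(m: CarbonMetaStore): Unit = carbonMetaStore = m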

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
index cd1087d..9899e8b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
@@ -91,7 +91,7 @@ class CarbonSource extends CreatableRelationProvider with RelationProvider
     CarbonEnv.getInstance(sqlContext.sparkSession)
     var newParameters = CarbonScalaUtil.getDeserializedParameters(parameters)
     val options = new CarbonOption(newParameters)
-    val isExists = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetastore.tableExists(
+    val isExists = CarbonEnv.getInstance(sqlContext.sparkSession).carbonMetaStore.tableExists(
       options.tableName, options.dbName)(sqlContext.sparkSession)
     val (doSave, doAppend) = (mode, isExists) match {
       case (SaveMode.ErrorIfExists, true) =>
@@ -182,7 +182,7 @@ class CarbonSource extends CreatableRelationProvider with RelationProvider
       case _: NoSuchTableException =>
         LOGGER.warn("Carbon Table [" +dbName +"] [" +tableName +"] is not found, " +
           "Now existing Schema will be overwritten with default properties")
-        val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
         val identifier = AbsoluteTableIdentifier.from(
           CarbonEnv.getTablePath(Some(dbName), tableName)(sparkSession),
           dbName,
@@ -295,7 +295,7 @@ object CarbonSource {
             "Schema cannot be specified in a Create Table As Select (CTAS) statement")
         }
         sqlParser
-          .getFields(CarbonEnv.getInstance(sparkSession).carbonMetastore
+          .getFields(CarbonEnv.getInstance(sparkSession).carbonMetaStore
             .getSchemaFromUnresolvedRelation(sparkSession, q))
       case None =>
         sqlParser.getFields(dataSchema)
@@ -315,7 +315,7 @@ object CarbonSource {
       tableDesc: CatalogTable,
       sparkSession: SparkSession,
       query: Option[LogicalPlan] = None): CatalogTable = {
-    val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val storageFormat = tableDesc.storage
     val properties = storageFormat.properties
     if (!properties.contains("carbonSchemaPartsNo")) {
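
The (mode, isExists) match in the first hunk of this file implements the standard Spark SaveMode contract. A self-contained illustration of that decision table (a sketch of the contract, not this file's actual code):

    import org.apache.spark.sql.SaveMode

    // (create, append) outcome for each SaveMode, per Spark's documented contract
    def decide(mode: SaveMode, exists: Boolean): (Boolean, Boolean) =
      (mode, exists) match {
        case (SaveMode.ErrorIfExists, true) =>
          throw new IllegalStateException("table already exists")
        case (SaveMode.Ignore, true)    => (false, false) // silently skip the write
        case (SaveMode.Overwrite, true) => (true, false)  // recreate, then write
        case (SaveMode.Append, true)    => (false, true)  // write into existing table
        case (_, false)                 => (true, false)  // table absent: create it
      }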

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
index 38ec07d..54096ca 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
@@ -65,7 +65,7 @@ case class CarbonDropDataMapCommand(
       val dbName = CarbonEnv.getDatabaseName(databaseNameOp)(sparkSession)
       val locksToBeAcquired = List(LockUsage.METADATA_LOCK)
       val carbonEnv = CarbonEnv.getInstance(sparkSession)
-      val catalog = carbonEnv.carbonMetastore
+      val catalog = carbonEnv.carbonMetaStore
       val tablePath = CarbonEnv.getTablePath(databaseNameOp, tableName)(sparkSession)
       catalog.checkSchemasModifiedTimeAndReloadTable(TableIdentifier(tableName, Some(dbName)))
       if (mainTable == null) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
index 6edfcf4..a908a84 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
@@ -74,7 +74,7 @@ case class CarbonAlterTableCompactionCommand(
     table = if (tableInfoOp.isDefined) {
       CarbonTable.buildFromTableInfo(tableInfoOp.get)
     } else {
-      val relation = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val relation = CarbonEnv.getInstance(sparkSession).carbonMetaStore
         .lookupRelation(Option(dbName), tableName)(sparkSession).asInstanceOf[CarbonRelation]
       if (relation == null) {
         throw new NoSuchTableException(dbName, tableName)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
index 7a974e3..3d2924c 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
@@ -114,7 +114,7 @@ case class CarbonLoadDataCommand(
     table = if (tableInfoOp.isDefined) {
         CarbonTable.buildFromTableInfo(tableInfoOp.get)
       } else {
-        val relation = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val relation = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .lookupRelation(Option(dbName), tableName)(sparkSession).asInstanceOf[CarbonRelation]
         if (relation == null) {
           throw new NoSuchTableException(dbName, tableName)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
index b35c285..6a9ac0a 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
@@ -51,7 +51,7 @@ case class RefreshCarbonTableCommand(
   val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
 
   override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
-    val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val databaseName = CarbonEnv.getDatabaseName(databaseNameOp)(sparkSession)
     setAuditTable(databaseName, tableName)
     // Steps

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/package.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/package.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/package.scala
index f7f76b9..8073f90 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/package.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/package.scala
@@ -36,7 +36,7 @@ object Checker {
       session: SparkSession): Unit = {
     val database = dbName.getOrElse(session.catalog.currentDatabase)
     val identifier = TableIdentifier(tableName, dbName)
-    if (!CarbonEnv.getInstance(session).carbonMetastore.tableExists(identifier)(session)) {
+    if (!CarbonEnv.getInstance(session).carbonMetaStore.tableExists(identifier)(session)) {
       val err = s"table $dbName.$tableName not found"
       LogServiceFactory.getLogService(this.getClass.getName).error(err)
       throw new NoSuchTableException(database, tableName)
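
For context, this guard runs before Carbon commands touch a table; in the current codebase the enclosing method is Checker.validateTableExists (the name is not visible in this hunk, so treat it as an assumption). Note also that the error message interpolates dbName, an Option[String], so it would print e.g. "table Some(default).t not found", while the database val above holds the unwrapped name. A usage sketch with placeholder names:

    // usage sketch; database and table names are placeholders
    Checker.validateTableExists(Some("default"), "my_table", sparkSession)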

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala
index 832cb00..b40bb6d 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropPartitionCommand.scala
@@ -59,7 +59,7 @@ case class CarbonAlterTableDropPartitionCommand(
     val tableName = model.tableName
     setAuditTable(dbName, tableName)
     setAuditInfo(Map("partition" -> model.partitionId))
-    val carbonMetaStore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val carbonMetaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val relation = carbonMetaStore.lookupRelation(Option(dbName), tableName)(sparkSession)
       .asInstanceOf[CarbonRelation]
     val tablePath = relation.carbonTable.getTablePath

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala
index f17cdd6..4d32d00 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableSplitPartitionCommand.scala
@@ -59,7 +59,7 @@ case class CarbonAlterTableSplitPartitionCommand(
 
   override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
     val dbName = splitPartitionModel.databaseName.getOrElse(sparkSession.catalog.currentDatabase)
-    val carbonMetaStore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val carbonMetaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val tableName = splitPartitionModel.tableName
     setAuditTable(dbName, tableName)
     setAuditInfo(Map("partition" -> splitPartitionModel.partitionId))

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonShowCarbonPartitionsCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonShowCarbonPartitionsCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonShowCarbonPartitionsCommand.scala
index 2915981..0e57513 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonShowCarbonPartitionsCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonShowCarbonPartitionsCommand.scala
@@ -36,7 +36,7 @@ private[sql] case class CarbonShowCarbonPartitionsCommand(
   override val output: Seq[Attribute] = CommonUtil.partitionInfoOutput
 
   override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
-    val relation = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val relation = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       .lookupRelation(tableIdentifier)(sparkSession).asInstanceOf[CarbonRelation]
     val carbonTable = relation.carbonTable
     setAuditTable(carbonTable)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
index 0599fb3..319f84b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
@@ -426,7 +426,7 @@ object PreAggregateUtil {
     val dbName = carbonTable.getDatabaseName
     val tableName = carbonTable.getTableName
     try {
-      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       locks = acquireLock(dbName, tableName, locksToBeAcquired, carbonTable)
       // get the latest carbon table and check for column existence
       // read the latest schema file
@@ -468,7 +468,7 @@ object PreAggregateUtil {
       thriftTable: TableInfo)(sparkSession: SparkSession): Unit = {
     val dbName = carbonTable.getDatabaseName
     val tableName = carbonTable.getTableName
-    CarbonEnv.getInstance(sparkSession).carbonMetastore
+    CarbonEnv.getInstance(sparkSession).carbonMetaStore
       .updateTableSchemaForDataMap(carbonTable.getCarbonTableIdentifier,
         carbonTable.getCarbonTableIdentifier,
         thriftTable,
@@ -527,7 +527,7 @@ object PreAggregateUtil {
    */
   def revertMainTableChanges(dbName: String, tableName: String, numberOfChildSchema: Int)
     (sparkSession: SparkSession): Unit = {
-    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
     carbonTable.getTableLastUpdatedTime
     val thriftTable: TableInfo = metastore.getThriftTableInfo(carbonTable)
@@ -539,7 +539,7 @@ object PreAggregateUtil {
 
   def getChildCarbonTable(databaseName: String, tableName: String)
     (sparkSession: SparkSession): Option[CarbonTable] = {
-    val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val carbonTable = metaStore.getTableFromMetadataCache(databaseName, tableName)
     if (carbonTable.isEmpty) {
       try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
index 719ed4a..9c92614 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
@@ -57,7 +57,7 @@ private[sql] case class CarbonAlterTableAddColumnCommand(
       // completion of 1st operation but as look up relation is called before it will have the
       // older carbon table and this can lead to inconsistent state in the system. Therefor look
       // up relation should be called after acquiring the lock
-      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
       if (!carbonTable.canAllow(carbonTable, TableOperation.ALTER_ADD_COLUMN)) {
         throw new MalformedCarbonCommandException(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala
index 2bcd3aa..23a7615 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDataTypeChangeCommand.scala
@@ -56,7 +56,7 @@ private[sql] case class CarbonAlterTableDataTypeChangeCommand(
     try {
       locks = AlterTableUtil
         .validateTableAndAcquireLock(dbName, tableName, locksToBeAcquired)(sparkSession)
-      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
       if (!carbonTable.canAllow(carbonTable, TableOperation.ALTER_CHANGE_DATATYPE,
         alterTableDataTypeChangeModel.columnName)) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
index ccf9e54..9ef6fd8 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
@@ -55,7 +55,7 @@ private[sql] case class CarbonAlterTableDropColumnCommand(
     try {
       locks = AlterTableUtil
         .validateTableAndAcquireLock(dbName, tableName, locksToBeAcquired)(sparkSession)
-      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
       if (!carbonTable.canAllow(carbonTable, TableOperation.ALTER_DROP,
           alterTableDropColumnModel.columns.asJava)) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
index c64f50b..dbf665a 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
@@ -62,7 +62,7 @@ private[sql] case class CarbonAlterTableRenameCommand(
     val oldTableName = oldTableIdentifier.table.toLowerCase
     val newTableName = newTableIdentifier.table.toLowerCase
     LOGGER.info(s"Rename table request has been received for $oldDatabaseName.$oldTableName")
-    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val relation: CarbonRelation =
       metastore.lookupRelation(oldTableIdentifier.database, oldTableName)(sparkSession)
         .asInstanceOf[CarbonRelation]

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
index 54be619..a5889ca 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
@@ -72,7 +72,7 @@ case class CarbonCreateTableAsSelectCommand(
         databaseOpt = Some(tableInfo.getDatabaseName)
       }
       val dbName = CarbonEnv.getDatabaseName(databaseOpt)(sparkSession)
-      val carbonDataSourceHadoopRelation = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val carbonDataSourceHadoopRelation = CarbonEnv.getInstance(sparkSession).carbonMetaStore
         .createCarbonDataSourceHadoopRelation(sparkSession,
           TableIdentifier(tableName, Option(dbName)))
       // execute command to load data into carbon table

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
index 713561b..a13e8e0 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
@@ -115,7 +115,7 @@ case class CarbonCreateTableCommand(
       val createTablePreExecutionEvent: CreateTablePreExecutionEvent =
         CreateTablePreExecutionEvent(sparkSession, tableIdentifier, Some(tableInfo))
       OperationListenerBus.getInstance.fireEvent(createTablePreExecutionEvent, operationContext)
-      val catalog = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val catalog = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       val carbonSchemaString = catalog.generateTableSchemaString(tableInfo, tableIdentifier)
       if (createDSTable) {
         try {
@@ -170,7 +170,7 @@ case class CarbonCreateTableCommand(
           case e: Exception =>
             // call the drop table to delete the created table.
             try {
-              CarbonEnv.getInstance(sparkSession).carbonMetastore
+              CarbonEnv.getInstance(sparkSession).carbonMetaStore
                 .dropTable(tableIdentifier)(sparkSession)
             } catch {
               case _: Exception => // No operation

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
index 2d560df..69db4e0 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
@@ -44,7 +44,7 @@ private[sql] case class CarbonDescribeFormattedCommand(
   extends MetadataCommand {
 
   override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
-    val relation = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val relation = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       .lookupRelation(tblIdentifier)(sparkSession).asInstanceOf[CarbonRelation]
     setAuditTable(relation.databaseName, relation.tableName)
     var results: Seq[(String, String, String)] = child.schema.fields.map { field =>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
index 0505a75..f69ef9e 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
@@ -95,7 +95,7 @@ case class CarbonDropTableCommand(
           sparkSession)
       OperationListenerBus.getInstance.fireEvent(dropTablePreEvent, operationContext)
 
-      CarbonEnv.getInstance(sparkSession).carbonMetastore.dropTable(identifier)(sparkSession)
+      CarbonEnv.getInstance(sparkSession).carbonMetaStore.dropTable(identifier)(sparkSession)
 
       if (carbonTable.hasDataMapSchema) {
         // drop all child tables

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
index 4cc62c6..3c9e538 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
@@ -58,7 +58,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
   def apply(plan: LogicalPlan): Seq[SparkPlan] = {
     plan match {
       case LoadDataCommand(identifier, path, isLocal, isOverwrite, partition)
-        if CarbonEnv.getInstance(sparkSession).carbonMetastore
+        if CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(identifier)(sparkSession) =>
         ExecutedCommandExec(
           CarbonLoadDataCommand(
@@ -78,7 +78,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
       case alter@AlterTableRenameCommand(oldTableIdentifier, newTableIdentifier, _) =>
         val dbOption = oldTableIdentifier.database.map(_.toLowerCase)
         val tableIdentifier = TableIdentifier(oldTableIdentifier.table.toLowerCase(), dbOption)
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(tableIdentifier)(sparkSession)
         if (isCarbonTable) {
           val renameModel = AlterTableRenameModel(tableIdentifier, newTableIdentifier)
@@ -87,13 +87,13 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
           ExecutedCommandExec(alter) :: Nil
         }
       case DropTableCommand(identifier, ifNotExists, isView, _)
-        if CarbonEnv.getInstance(sparkSession).carbonMetastore
+        if CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .isTablePathExists(identifier)(sparkSession) =>
         ExecutedCommandExec(
           CarbonDropTableCommand(ifNotExists, identifier.database,
             identifier.table.toLowerCase)) :: Nil
       case createLikeTable: CreateTableLikeCommand =>
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(createLikeTable.sourceTable)(sparkSession)
         if (isCarbonTable) {
           throw new MalformedCarbonCommandException(
@@ -118,7 +118,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
       case drop@DropDatabaseCommand(dbName, ifExists, isCascade) =>
         ExecutedCommandExec(CarbonDropDatabaseCommand(drop)) :: Nil
       case alterTable@CarbonAlterTableCompactionCommand(altertablemodel, _, _) =>
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(TableIdentifier(altertablemodel.tableName,
             altertablemodel.dbName))(sparkSession)
         if (isCarbonTable) {
@@ -128,7 +128,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
             "Operation not allowed : " + altertablemodel.alterSql)
         }
       case dataTypeChange@CarbonAlterTableDataTypeChangeCommand(alterTableChangeDataTypeModel) =>
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(TableIdentifier(alterTableChangeDataTypeModel.tableName,
             alterTableChangeDataTypeModel.databaseName))(sparkSession)
         if (isCarbonTable) {
@@ -147,7 +147,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
           throw new MalformedCarbonCommandException("Unsupported alter operation on hive table")
         }
       case addColumn@CarbonAlterTableAddColumnCommand(alterTableAddColumnsModel) =>
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(TableIdentifier(alterTableAddColumnsModel.tableName,
             alterTableAddColumnsModel.databaseName))(sparkSession)
         if (isCarbonTable) {
@@ -181,7 +181,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
           throw new MalformedCarbonCommandException("Unsupported alter operation on hive table")
         }
       case dropColumn@CarbonAlterTableDropColumnCommand(alterTableDropColumnModel) =>
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(TableIdentifier(alterTableDropColumnModel.tableName,
             alterTableDropColumnModel.databaseName))(sparkSession)
         if (isCarbonTable) {
@@ -206,7 +206,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
         } else {
           false
         }
-        if (CarbonEnv.getInstance(sparkSession).carbonMetastore
+        if (CarbonEnv.getInstance(sparkSession).carbonMetaStore
               .tableExists(identifier)(sparkSession) && (isExtended || isFormatted)) {
           val resolvedTable =
             sparkSession.sessionState.executePlan(UnresolvedRelation(identifier)).analyzed
@@ -221,10 +221,10 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
           Nil
         }
       case ShowPartitionsCommand(t, cols) =>
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(t)(sparkSession)
         if (isCarbonTable) {
-          val carbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+          val carbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
             .lookupRelation(t)(sparkSession).asInstanceOf[CarbonRelation].carbonTable
           if (carbonTable != null && !carbonTable.getTableInfo.isTransactionalTable) {
             throw new MalformedCarbonCommandException(
@@ -239,7 +239,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
           ExecutedCommandExec(ShowPartitionsCommand(t, cols)) :: Nil
         }
       case adp@AlterTableDropPartitionCommand(tableName, specs, ifExists, purge, retainData) =>
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(tableName)(sparkSession)
         if (isCarbonTable) {
           ExecutedCommandExec(
@@ -290,10 +290,10 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
         val cmd = CreateDataSourceTableCommand(updatedCatalog, ignoreIfExists)
         ExecutedCommandExec(cmd) :: Nil
       case AlterTableSetPropertiesCommand(tableName, properties, isView)
-        if CarbonEnv.getInstance(sparkSession).carbonMetastore
+        if CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(tableName)(sparkSession) => {
 
-        val carbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val carbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .lookupRelation(tableName)(sparkSession).asInstanceOf[CarbonRelation].carbonTable
         if (carbonTable != null && !carbonTable.getTableInfo.isTransactionalTable) {
           throw new MalformedCarbonCommandException(
@@ -323,7 +323,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
         ExecutedCommandExec(CarbonAlterTableSetCommand(tableName, properties, isView)) :: Nil
       }
       case AlterTableUnsetPropertiesCommand(tableName, propKeys, ifExists, isView)
-        if CarbonEnv.getInstance(sparkSession).carbonMetastore
+        if CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(tableName)(sparkSession) => {
         // TODO remove this limitation later
         if (propKeys.exists(_.equalsIgnoreCase("streaming"))) {
@@ -336,7 +336,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
       case rename@AlterTableRenamePartitionCommand(tableName, oldPartition, newPartition) =>
         val dbOption = tableName.database.map(_.toLowerCase)
         val tableIdentifier = TableIdentifier(tableName.table.toLowerCase(), dbOption)
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(tableIdentifier)(sparkSession)
         if (isCarbonTable) {
           throw new UnsupportedOperationException("Renaming partition on table is not supported")
@@ -346,7 +346,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
       case addPart@AlterTableAddPartitionCommand(tableName, partitionSpecsAndLocs, ifNotExists) =>
         val dbOption = tableName.database.map(_.toLowerCase)
         val tableIdentifier = TableIdentifier(tableName.table.toLowerCase(), dbOption)
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(tableIdentifier)(sparkSession)
         if (isCarbonTable) {
           ExecutedCommandExec(
@@ -371,7 +371,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
         }
         ExecutedCommandExec(plan.asInstanceOf[RunnableCommand]) :: Nil
       case alterSetLoc@AlterTableSetLocationCommand(tableName, _, _) =>
-        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore
+        val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
           .tableExists(tableName)(sparkSession)
         if (isCarbonTable) {
           throw new UnsupportedOperationException("Set partition location is not supported")
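
Nearly every case arm above performs the same CarbonEnv.getInstance(sparkSession).carbonMetaStore.tableExists(...) probe, which is why this one rename touches so many lines of DDLStrategy. A hypothetical private helper (not in this commit) would collapse the repetition:

    // hypothetical helper; sparkSession is the DDLStrategy constructor argument
    private def isCarbonTable(identifier: TableIdentifier): Boolean =
      CarbonEnv.getInstance(sparkSession).carbonMetaStore
        .tableExists(identifier)(sparkSession)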

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
index 96b31c2..c1be154 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
@@ -169,7 +169,7 @@ class CarbonFileMetastore extends CarbonMetaStore {
         }
         val identifier: AbsoluteTableIdentifier = AbsoluteTableIdentifier.from(
            catalogTable.location.toString, database, tableIdentifier.table)
-        CarbonEnv.getInstance(sparkSession).carbonMetastore.
+        CarbonEnv.getInstance(sparkSession).carbonMetaStore.
           createCarbonRelation(catalogTable.storage.properties, identifier, sparkSession)
       case _ => throw new NoSuchTableException(database, tableIdentifier.table)
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonPreAggregateRules.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonPreAggregateRules.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonPreAggregateRules.scala
index 9b204f8..a8d7d22 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonPreAggregateRules.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonPreAggregateRules.scala
@@ -738,7 +738,7 @@ case class CarbonPreAggregateQueryRules(sparkSession: SparkSession) extends Rule
     val identifier = TableIdentifier(
       dataMapSchema.getChildSchema.getTableName,
       Some(parentTable.getDatabaseName))
-    val catalog = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val catalog = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val carbonRelation =
       catalog.lookupRelation(identifier)(sparkSession).asInstanceOf[CarbonRelation]
     val segmentStatusManager = new SegmentStatusManager(carbonRelation.carbonTable
@@ -1045,7 +1045,7 @@ case class CarbonPreAggregateQueryRules(sparkSession: SparkSession) extends Rule
     if (!selectedAggMaps.isEmpty) {
       // filter the selected child schema based on size to select the pre-aggregate tables
       // that are enabled
-      val catalog = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val catalog = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       val relationBuffer = selectedAggMaps.asScala.map { selectedDataMapSchema =>
         val identifier = TableIdentifier(
           selectedDataMapSchema.getRelationIdentifier.getTableName,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
index 0378bf9..46473f2 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
@@ -115,7 +115,7 @@ object CarbonSparkSqlParserUtil {
           operationNotAllowed("Create external table as select", tableHeader)
         }
         fields = parser
-          .getFields(CarbonEnv.getInstance(sparkSession).carbonMetastore
+          .getFields(CarbonEnv.getInstance(sparkSession).carbonMetaStore
             .getSchemaFromUnresolvedRelation(sparkSession, Some(q).get))
       case _ =>
       // ignore this case

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
index 3faa111..756a1ad 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
@@ -63,7 +63,7 @@ object AlterTableUtil {
       locksToBeAcquired: List[String])
     (sparkSession: SparkSession): List[ICarbonLock] = {
     val relation =
-      CarbonEnv.getInstance(sparkSession).carbonMetastore
+      CarbonEnv.getInstance(sparkSession).carbonMetaStore
         .lookupRelation(Option(dbName), tableName)(sparkSession)
         .asInstanceOf[CarbonRelation]
     if (relation == null) {
@@ -116,7 +116,7 @@ object AlterTableUtil {
       Option[Seq[org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema]]) = {
     val dbName = carbonTable.getDatabaseName
     val tableName = carbonTable.getTableName
-    CarbonEnv.getInstance(sparkSession).carbonMetastore
+    CarbonEnv.getInstance(sparkSession).carbonMetaStore
       .updateTableSchemaForAlter(carbonTable.getCarbonTableIdentifier,
         carbonTable.getCarbonTableIdentifier,
         thriftTable,
@@ -124,7 +124,7 @@ object AlterTableUtil {
         carbonTable.getAbsoluteTableIdentifier.getTablePath)(sparkSession)
     val tableIdentifier = TableIdentifier(tableName, Some(dbName))
     sparkSession.catalog.refreshTable(tableIdentifier.quotedString)
-    val schema = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val schema = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       .lookupRelation(tableIdentifier)(sparkSession).schema.json
     val schemaParts = prepareSchemaJsonForAlterTable(sparkSession.sparkContext.getConf, schema)
     (tableIdentifier, schemaParts, cols)
@@ -167,7 +167,7 @@ object AlterTableUtil {
     val oldCarbonTableIdentifier = oldCarbonTable.getCarbonTableIdentifier
     val database = oldCarbonTable.getDatabaseName
     val newCarbonTableIdentifier = new CarbonTableIdentifier(database, newTableName, tableId)
-    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val fileType = FileFactory.getFileType(tablePath)
     if (FileFactory.isFileExist(tablePath, fileType)) {
       val tableInfo = metastore.getThriftTableInfo(oldCarbonTable)
@@ -195,7 +195,7 @@ object AlterTableUtil {
    */
   def revertAddColumnChanges(dbName: String, tableName: String, timeStamp: Long)
     (sparkSession: SparkSession): Unit = {
-    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
     val thriftTable: TableInfo = metastore.getThriftTableInfo(carbonTable)
     val evolutionEntryList = thriftTable.fact_table.schema_evolution.schema_evolution_history
@@ -220,7 +220,7 @@ object AlterTableUtil {
    */
   def revertDropColumnChanges(dbName: String, tableName: String, timeStamp: Long)
     (sparkSession: SparkSession): Unit = {
-    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
     val thriftTable: TableInfo = metastore.getThriftTableInfo(carbonTable)
     val evolutionEntryList = thriftTable.fact_table.schema_evolution.schema_evolution_history
@@ -251,7 +251,7 @@ object AlterTableUtil {
    */
   def revertDataTypeChanges(dbName: String, tableName: String, timeStamp: Long)
     (sparkSession: SparkSession): Unit = {
-    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+    val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
     val carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
     val thriftTable: TableInfo = metastore.getThriftTableInfo(carbonTable)
     val evolutionEntryList = thriftTable.fact_table.schema_evolution.schema_evolution_history
@@ -294,7 +294,7 @@ object AlterTableUtil {
     try {
       locks = AlterTableUtil
         .validateTableAndAcquireLock(dbName, tableName, locksToBeAcquired)(sparkSession)
-      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetastore
+      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
       val carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
       val lowerCasePropertiesMap: mutable.Map[String, String] = mutable.Map.empty
       // convert all the keys to lower case
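
The three revert*Changes methods above share an identical prologue: resolve the metastore, re-read the CarbonTable, and fetch its thrift TableInfo before filtering the schema-evolution history by timestamp. A hypothetical extraction of that shared shape (not in this commit):

    // hypothetical shared prologue for the revert*Changes methods
    private def tableAndThrift(dbName: String, tableName: String)
      (sparkSession: SparkSession): (CarbonMetaStore, CarbonTable, TableInfo) = {
      val metastore = CarbonEnv.getInstance(sparkSession).carbonMetaStore
      val carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession)
      (metastore, carbonTable, metastore.getThriftTableInfo(carbonTable))
    }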

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala b/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
index d4d9a84..cb3ae29 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
@@ -69,7 +69,7 @@ object CleanFiles {
       forceTableClean = args(2).toBoolean
     }
     val spark = TableAPIUtil.spark(storePath, s"CleanFiles: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.
+    CarbonEnv.getInstance(spark).carbonMetaStore.
       checkSchemasModifiedTimeAndReloadTable(TableIdentifier(tableName, Some(dbName)))
 
     cleanFiles(spark, dbName, tableName, forceTableClean)
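
Note: CleanFiles and the Compaction, DeleteSegmentByDate and
DeleteSegmentById tools below share the same entry skeleton: parse the
db.table argument, obtain a session from TableAPIUtil.spark, force a schema
reload through carbonMetaStore so the tool never acts on a stale cached
definition, then run the operation. A sketch of that skeleton (runTableTool
is an illustrative wrapper, not project code; the CarbonEnv and
TableIdentifier calls are the ones visible in the hunks):

  import org.apache.spark.sql.{CarbonEnv, SparkSession}
  import org.apache.spark.sql.catalyst.TableIdentifier

  // Illustrative wrapper around the pattern shared by the four tools.
  def runTableTool(spark: SparkSession, dbName: String, tableName: String)
      (operation: => Unit): Unit = {
    // Pick up any schema change made by another session before operating.
    CarbonEnv.getInstance(spark).carbonMetaStore
      .checkSchemasModifiedTimeAndReloadTable(TableIdentifier(tableName, Some(dbName)))
    operation
  }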

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala b/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
index d4ec81e..0a3a870 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
@@ -58,7 +58,7 @@ object Compaction {
     val (dbName, tableName) = TableAPIUtil.parseSchemaName(TableAPIUtil.escape(args(1)))
     val compactionType = TableAPIUtil.escape(args(2))
     val spark = TableAPIUtil.spark(storePath, s"Compaction: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.
+    CarbonEnv.getInstance(spark).carbonMetaStore.
       checkSchemasModifiedTimeAndReloadTable(TableIdentifier(tableName, Some(dbName)))
     compaction(spark, dbName, tableName, compactionType)
   }
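
Note: a hypothetical invocation of the tool, with the argument order
inferred from the hunk above (store path, db.table, compaction type; the
values are examples only, and "minor" assumes the usual minor/major
compaction types):

  Compaction.main(Array("/tmp/carbon.store", "default.sales", "minor"))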

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
index fcc9f2f..90a37f6 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
@@ -45,7 +45,7 @@ object DeleteSegmentByDate {
     val (dbName, tableName) = TableAPIUtil.parseSchemaName(TableAPIUtil.escape(args(1)))
     val dateValue = TableAPIUtil.escape(args(2))
     val spark = TableAPIUtil.spark(storePath, s"DeleteSegmentByDate: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.
+    CarbonEnv.getInstance(spark).carbonMetaStore.
       checkSchemasModifiedTimeAndReloadTable(TableIdentifier(tableName, Some(dbName)))
     deleteSegmentByDate(spark, dbName, tableName, dateValue)
   }
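
Note: likewise a hypothetical invocation; the timestamp format here is an
assumption, not taken from the source:

  DeleteSegmentByDate.main(Array("/tmp/carbon.store", "default.sales", "2018-12-01 00:00:00"))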

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
index 13ef933..15bec02 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
@@ -50,7 +50,7 @@ object DeleteSegmentById {
     val (dbName, tableName) = TableAPIUtil.parseSchemaName(TableAPIUtil.escape(args(1)))
     val segmentIds = extractSegmentIds(TableAPIUtil.escape(args(2)))
     val spark = TableAPIUtil.spark(storePath, s"DeleteSegmentById: $dbName.$tableName")
-    CarbonEnv.getInstance(spark).carbonMetastore.
+    CarbonEnv.getInstance(spark).carbonMetaStore.
       checkSchemasModifiedTimeAndReloadTable(TableIdentifier(tableName, Some(dbName)))
     deleteSegmentById(spark, dbName, tableName, segmentIds)
   }
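
Note: extractSegmentIds above receives the escaped third argument; a sketch
of the likely behaviour, splitting a comma-separated id list such as
"0,1,2" (an illustrative reimplementation, not the project's code):

  // Split a comma-separated segment id list into individual ids.
  def extractSegmentIds(segmentIds: String): Seq[String] =
    segmentIds.split(",").toSeq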

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3ff61cac/integration/spark2/src/main/scala/org/apache/spark/util/TableAPIUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/TableAPIUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/util/TableAPIUtil.scala
index a8094b6..0557e14 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/TableAPIUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/TableAPIUtil.scala
@@ -62,7 +62,7 @@ object TableAPIUtil {
       spark: SparkSession,
       dbName: String,
       tableName: String): Unit = {
-    if (!CarbonEnv.getInstance(spark).carbonMetastore
+    if (!CarbonEnv.getInstance(spark).carbonMetaStore
       .tableExists(tableName, Some(dbName))(spark)) {
       val err = s"table $dbName.$tableName not found"
       LOGGER.error(err)
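
Note: this guard is what each tool's main consults before touching a table.
A hedged sketch of its complete shape (the thrown exception type is an
assumption, since the hunk ends at the log call; LOGGER is the enclosing
object's logger):

  if (!CarbonEnv.getInstance(spark).carbonMetaStore
    .tableExists(tableName, Some(dbName))(spark)) {
    val err = s"table $dbName.$tableName not found"
    LOGGER.error(err)
    // Assumed continuation: surface the error to the caller.
    throw new IllegalArgumentException(err)
  }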