Posted to commits@carbondata.apache.org by ja...@apache.org on 2017/07/27 12:42:02 UTC
[2/7] carbondata git commit: [CARBONDATA-1301] change command to update schema and data separately
[CARBONDATA-1301] change command to update schema and data separately
This closes #1160
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/2dbfab64
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/2dbfab64
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/2dbfab64
Branch: refs/heads/master
Commit: 2dbfab64103f139de9cfb7ad683c877e9802c562
Parents: 042a05a
Author: jackylk <ja...@huawei.com>
Authored: Wed Jul 12 00:40:44 2017 +0800
Committer: Raghunandan S <ca...@gmail.com>
Committed: Thu Jul 27 18:47:52 2017 +0800
----------------------------------------------------------------------
.../execution/command/carbonTableSchema.scala | 28 ++++++++++----------
.../spark/sql/hive/CarbonFileMetastore.scala | 3 +--
2 files changed, 15 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/carbondata/blob/2dbfab64/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 1781477..f3baf58 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -233,10 +233,10 @@ case class CreateTable(cm: TableModel, createDSTable: Boolean = true) extends Ru
sparkSession.sql(
s"""CREATE TABLE $dbName.$tbName
- |(${ fields.map(f => f.rawSchema).mkString(",") })
- |USING org.apache.spark.sql.CarbonSource""".stripMargin +
- s""" OPTIONS (tableName "$tbName", dbName "$dbName", tablePath """.stripMargin +
- s""""$tablePath"$carbonSchemaString) """)
+ |(${ fields.map(f => f.rawSchema).mkString(",") })
+ |USING org.apache.spark.sql.CarbonSource""".stripMargin +
+ s""" OPTIONS (tableName "$tbName", dbName "$dbName", tablePath """.stripMargin +
+ s""""$tablePath"$carbonSchemaString) """)
} catch {
case e: Exception =>
val identifier: TableIdentifier = TableIdentifier(tbName, Some(dbName))
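Note: the hunk above only re-indents the string interpolation that assembles the CREATE TABLE DDL. As a minimal, standalone sketch (not CarbonData code; dbName, tbName, tablePath, the column list, and the empty carbonSchemaString are hypothetical stand-ins), the same construction can be reproduced and printed to inspect the statement that gets handed to sparkSession.sql:

object CreateTableDdlSketch {
  def main(args: Array[String]): Unit = {
    val dbName = "default"                        // hypothetical database name
    val tbName = "t1"                             // hypothetical table name
    val tablePath = "/store/default/t1"           // hypothetical table location
    val rawSchemas = Seq("id int", "name string") // stands in for fields.map(_.rawSchema)
    val carbonSchemaString = ""                   // the real command appends the serialized schema here

    // Same string construction as in the CreateTable command above.
    val ddl =
      s"""CREATE TABLE $dbName.$tbName
         |(${ rawSchemas.mkString(",") })
         |USING org.apache.spark.sql.CarbonSource""".stripMargin +
      s""" OPTIONS (tableName "$tbName", dbName "$dbName", tablePath """ +
      s""""$tablePath"$carbonSchemaString) """

    println(ddl)
  }
}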
@@ -268,8 +268,8 @@ case class DeleteLoadsById(
override def processData(sparkSession: SparkSession): Seq[Row] = {
Checker.validateTableExists(databaseNameOp, tableName, sparkSession)
val carbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore.
- lookupRelation(databaseNameOp, tableName)(sparkSession).asInstanceOf[CarbonRelation].
- tableMeta.carbonTable
+ lookupRelation(databaseNameOp, tableName)(sparkSession).asInstanceOf[CarbonRelation].
+ tableMeta.carbonTable
CarbonStore.deleteLoadById(
loadids,
getDB.getDatabaseName(databaseNameOp, sparkSession),
@@ -293,8 +293,8 @@ case class DeleteLoadsByLoadDate(
override def processData(sparkSession: SparkSession): Seq[Row] = {
Checker.validateTableExists(databaseNameOp, tableName, sparkSession)
val carbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore.
- lookupRelation(databaseNameOp, tableName)(sparkSession).asInstanceOf[CarbonRelation].
- tableMeta.carbonTable
+ lookupRelation(databaseNameOp, tableName)(sparkSession).asInstanceOf[CarbonRelation].
+ tableMeta.carbonTable
CarbonStore.deleteLoadByDate(
loadDate,
getDB.getDatabaseName(databaseNameOp, sparkSession),
@@ -751,10 +751,10 @@ case class LoadTable(
}
GlobalDictionaryUtil.generateGlobalDictionary(
- sparkSession.sqlContext,
- carbonLoadModel,
- relation.tableMeta.storePath,
- dictionaryDataFrame)
+ sparkSession.sqlContext,
+ carbonLoadModel,
+ relation.tableMeta.storePath,
+ dictionaryDataFrame)
CarbonDataRDDFactory.loadCarbonData(sparkSession.sqlContext,
carbonLoadModel,
relation.tableMeta.storePath,
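For orientation, the LoadTable command re-indented above is the one that backs CarbonData's LOAD DATA statement (dictionary generation followed by CarbonDataRDDFactory.loadCarbonData). A hedged usage sketch, assuming a SparkSession named spark and a hypothetical table and input path:

// Hypothetical session, table, and path; assumes CarbonData's documented LOAD DATA DDL.
spark.sql(
  """LOAD DATA INPATH 'hdfs://namenode/data/sample.csv'
    |INTO TABLE default.t1
    |OPTIONS('DELIMITER'=',')""".stripMargin)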
@@ -847,8 +847,8 @@ case class ShowLoads(
override def processData(sparkSession: SparkSession): Seq[Row] = {
Checker.validateTableExists(databaseNameOp, tableName, sparkSession)
val carbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore.
- lookupRelation(databaseNameOp, tableName)(sparkSession).asInstanceOf[CarbonRelation].
- tableMeta.carbonTable
+ lookupRelation(databaseNameOp, tableName)(sparkSession).asInstanceOf[CarbonRelation].
+ tableMeta.carbonTable
CarbonStore.showSegments(
getDB.getDatabaseName(databaseNameOp, sparkSession),
tableName,
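The ShowLoads command touched here serves the segment listing. A hedged usage sketch, assuming a SparkSession named spark, a hypothetical table, and the SHOW SEGMENTS DDL documented for CarbonData:

// Hypothetical session and table name.
spark.sql("SHOW SEGMENTS FOR TABLE default.t1 LIMIT 10").show(false)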
http://git-wip-us.apache.org/repos/asf/carbondata/blob/2dbfab64/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
index 048681c..549841b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
@@ -430,8 +430,7 @@ class CarbonFileMetastore(conf: RuntimeConfig, val storePath: String) extends Ca
// while dropping, we should refresh the schema modified time so that any change
// made from another beeline session gets picked up.
checkSchemasModifiedTimeAndReloadTables
- val file = FileFactory.getCarbonFile(metadataFilePath, fileType)
- CarbonUtil.deleteFoldersAndFilesSilent(file.getParentFile)
+
val metadataToBeRemoved: Option[TableMeta] = getTableFromMetadata(dbName,
tableIdentifier.table)
metadataToBeRemoved match {