Posted to commits@carbondata.apache.org by ra...@apache.org on 2016/08/30 15:18:22 UTC

[1/2] incubator-carbondata git commit: Invalidating the table from hive context while dropping the table

Repository: incubator-carbondata
Updated Branches:
  refs/heads/master ac5ddda9e -> 4275277b9


Invalidating the table from hive context while dropping the table

Adding test case for new scenario
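
In short, the fix makes the drop path evict the cached relation from Spark's HiveMetastoreCatalog before issuing the Hive DROP TABLE, so that a table recreated under the same name does not resolve against a stale cached schema. A minimal sketch of the sequence the new utility performs, with an illustrative table name; note that SQLContext.catalog is package-private to org.apache.spark.sql, which is presumably why the helper below lives in the org.apache.spark.sql.hive package:

  import org.apache.spark.sql.catalyst.SqlParser
  import org.apache.spark.sql.hive.HiveContext

  val hiveContext = sqlContext.asInstanceOf[HiveContext]
  // Parse "db.table" into the TableIdentifier the catalog expects.
  val tableIdent = SqlParser.parseTableIdentifier("default.dropTableTest1")
  // Evict the cached relation first, then drop the table in the Hive metastore.
  hiveContext.catalog.invalidateTable(tableIdent)
  hiveContext.runSqlHive("DROP TABLE IF EXISTS default.dropTableTest1")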


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/1a08f6c3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/1a08f6c3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/1a08f6c3

Branch: refs/heads/master
Commit: 1a08f6c362ea87ed09d9ea683bfdfc0207a7dbf9
Parents: ac5ddda
Author: Manohar V <ma...@gmail.com>
Authored: Tue Aug 30 12:00:39 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Aug 30 20:46:27 2016 +0530

----------------------------------------------------------------------
 .../execution/command/carbonTableSchema.scala   |  12 +-
 .../spark/sql/hive/CarbonHiveMetadataUtil.scala |  58 +++++++++
 .../spark/sql/hive/CarbonMetastoreCatalog.scala |   2 +-
 .../deleteTable/TestDeleteTableNewDDL.scala     | 123 ++++++++++++++++++-
 4 files changed, 183 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1a08f6c3/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index ca95512..85f0a2d 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -32,7 +32,7 @@ import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Cast, Literal}
 import org.apache.spark.sql.execution.{RunnableCommand, SparkPlan}
-import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.sql.hive.{CarbonHiveMetadataUtil, HiveContext}
 import org.apache.spark.sql.types.TimestampType
 import org.apache.spark.util.FileUtils
 import org.codehaus.jackson.map.ObjectMapper
@@ -1254,15 +1254,7 @@ private[sql] case class DropTableCommand(ifExistsSet: Boolean, databaseNameOp: O
       }
       if (sqlContext.tableNames(dbName).map(x => x.toLowerCase())
         .contains(tableName.toLowerCase())) {
-        try {
-          sqlContext.asInstanceOf[HiveContext].catalog.client.
-            runSqlHive(s"DROP TABLE IF EXISTS $dbName.$tableName")
-        } catch {
-          case e: RuntimeException =>
-            LOGGER.audit(
-              s"Error While deleting the table $dbName.$tableName during drop carbon table" +
-              e.getMessage)
-        }
+          CarbonHiveMetadataUtil.invalidateAndDropTable(dbName, tableName, sqlContext)
       } else if (!ifExistsSet) {
         sys.error(s"Carbon Table $dbName.$tableName does not exist")
       }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1a08f6c3/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
new file mode 100644
index 0000000..3a3e8e2
--- /dev/null
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.hive
+
+import org.apache.spark.sql.SQLContext
+import org.apache.spark.sql.catalyst.SqlParser
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+
+
+/**
+ * This class contains all carbon hive metadata related utilities
+ */
+object CarbonHiveMetadataUtil {
+
+  @transient
+  val LOGGER = LogServiceFactory.getLogService(CarbonHiveMetadataUtil.getClass.getName)
+
+
+  /**
+   * This method invalidates the table from HiveMetastoreCatalog before dropping table
+   *
+   * @param schemaName
+   * @param cubeName
+   * @param sqlContext
+   */
+  def invalidateAndDropTable(schemaName: String,
+      cubeName: String,
+      sqlContext: SQLContext): Unit = {
+    val hiveContext = sqlContext.asInstanceOf[HiveContext]
+    val tableWithDb = schemaName + "." + cubeName
+    val tableIdent = SqlParser.parseTableIdentifier(tableWithDb)
+    try {
+      hiveContext.catalog.invalidateTable(tableIdent)
+      hiveContext.runSqlHive(s"DROP TABLE IF EXISTS $schemaName.$cubeName")
+    } catch {
+      case e: Exception =>
+        LOGGER.audit(
+          s"Error While deleting the table $schemaName.$cubeName during drop carbon table" +
+          e.getMessage)
+    }
+  }
+
+}
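
For reference, a hedged example of calling the new helper from the drop path (database and table names here are illustrative):

  // Invalidates the cached relation in HiveMetastoreCatalog and then drops the
  // table from the Hive metastore; failures are written to the audit log rather
  // than rethrown.
  CarbonHiveMetadataUtil.invalidateAndDropTable("default", "dropTableTest1", sqlContext)

Because the catch block logs and swallows the exception, callers such as DropTableCommand and CarbonMetastoreCatalog.dropTable proceed with the rest of the Carbon-side cleanup even if the Hive drop fails.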

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1a08f6c3/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
index f5e9618..a9d6077 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
@@ -443,8 +443,8 @@ class CarbonMetastoreCatalog(hiveContext: HiveContext, val storePath: String,
            c.carbonTableIdentifier.getTableName.equalsIgnoreCase(tableName))(0)
     org.apache.carbondata.core.carbon.metadata.CarbonMetadata.getInstance
       .removeTable(dbName + "_" + tableName)
+    CarbonHiveMetadataUtil.invalidateAndDropTable(dbName, tableName, sqlContext)
 
-    sqlContext.asInstanceOf[HiveContext].runSqlHive(s"DROP TABLE IF EXISTS $dbName.$tableName")
     // discard cached table info in cachedDataSourceTables
     sqlContext.catalog.refreshTable(tableIdentifier)
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1a08f6c3/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
index 5aa8596..bd822e4 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
@@ -18,6 +18,8 @@
  */
 package org.apache.carbondata.spark.testsuite.deleteTable
 
+import java.io.File
+
 import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
 import org.apache.spark.sql.common.util.CarbonHiveContext._
 import org.apache.spark.sql.common.util.QueryTest
@@ -28,6 +30,10 @@ import org.scalatest.BeforeAndAfterAll
  */
 class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
 
+  val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
+    .getCanonicalPath
+  val resource = currentDirectory + "/src/test/resources/"
+
   override def beforeAll: Unit = {
 
     sql("CREATE TABLE IF NOT EXISTS table1(empno Int, empname Array<String>, designation String, doj Timestamp, "
@@ -118,9 +124,124 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
     sql("drop table default.table3")
   }
 
+
+  test("drop table and create table with different data type") {
+    sql(
+      "CREATE table dropTableTest1 (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format' "
+
+    )
+
+    sql(
+      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE dropTableTest1 " +
+      "OPTIONS('DELIMITER' =  ',')")
+    sql("select * from dropTableTest1")
+    sql("drop table dropTableTest1")
+
+    sql(
+      "CREATE table dropTableTest1 (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary String) stored by 'org.apache.carbondata.format' "
+    )
+
+    sql(
+      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE dropTableTest1 " +
+      "OPTIONS('DELIMITER' =  ',')")
+
+    sql("select * from dropTableTest1")
+
+  }
+
+
+  test("drop table and create table with dictionary exclude integer scenario") {
+    sql(
+      "CREATE table dropTableTest2 (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format' " +
+      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='salary')"
+    )
+    sql(
+      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE dropTableTest2 " +
+      "OPTIONS('DELIMITER' =  ',')")
+    sql("select * from dropTableTest2")
+    sql("drop table dropTableTest2")
+    sql(
+      "CREATE table dropTableTest2 (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary decimal) stored by 'org.apache.carbondata.format' " +
+      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='date')"
+    )
+    sql(
+      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE dropTableTest2 " +
+      "OPTIONS('DELIMITER' =  ',')")
+    sql("select * from dropTableTest2")
+
+  }
+
+  test("drop table and create table with dictionary exclude string scenario") {
+    sql("create database if not exists test")
+    sql(
+      "CREATE table test.dropTableTest3 (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format' " +
+      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='salary')"
+    )
+    sql(
+      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE test.dropTableTest3 " +
+      "OPTIONS('DELIMITER' =  ',')")
+    sql("select * from test.dropTableTest3")
+    sql("drop table test.dropTableTest3")
+    sql(
+      "CREATE table test.dropTableTest3 (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary decimal) stored by 'org.apache.carbondata.format' " +
+      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='date')"
+    )
+    sql(
+      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE test.dropTableTest3 " +
+      "OPTIONS('DELIMITER' =  ',')")
+    sql("select * from test.dropTableTest3")
+
+  }
+
+  test("drop table and create table with same name but different cols") {
+
+    sql(
+      "CREATE TABLE dropTableTest4 (imei string,age int,task bigint,name string,country string," +
+      "city string,sale int,num double,level decimal(10,3),quest bigint,productdate timestamp," +
+      "enddate timestamp,PointId double,score decimal(10,3))STORED BY 'org.apache.carbondata" +
+      ".format'")
+    sql(
+      "LOAD DATA INPATH './src/test/resources/big_int_Decimal.csv'  INTO TABLE dropTableTest4 " +
+      "options ('DELIMITER'=',', 'QUOTECHAR'='\"', 'COMPLEX_DELIMITER_LEVEL_1'='$'," +
+      "'COMPLEX_DELIMITER_LEVEL_2'=':', 'FILEHEADER'= '')")
+    sql("select * from dropTableTest4")
+    sql("drop table dropTableTest4")
+    sql(
+      "CREATE table dropTableTest4 (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary decimal) stored by 'org.apache.carbondata" +
+      ".format' " +
+      "TBLPROPERTIES('DICTIONARY_EXCLUDE'='date')"
+    )
+    sql(
+      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE dropTableTest4 " +
+      "OPTIONS('DELIMITER' =  ',')")
+    sql("select * from dropTableTest4")
+
+
+  }
+
+
   override def afterAll: Unit = {
 
-    sql("drop table CaseSensitiveTable")
+    sql("drop table CaseInsensitiveTable")
+    sql("drop table dropTableTest1")
+    sql("drop table dropTableTest2")
+    sql("drop table test.dropTableTest3")
+    sql("drop database test")
+    sql("drop table dropTableTest4")
   }
 
 }
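
Taken together, the new tests exercise dropping a Carbon table and immediately recreating it under the same name with a changed column type, with and without DICTIONARY_EXCLUDE, both in the default database and in a separate database. The essential pattern, assuming the test harness's sql(...) helper (a condensed, illustrative variant of the tests above, not an additional test case):

  sql("CREATE TABLE dropTableTest1 (ID int, salary int) STORED BY 'org.apache.carbondata.format'")
  sql("DROP TABLE dropTableTest1")
  // Recreate under the same name with a different type for salary; before this
  // fix, queries against the recreated table could still hit the stale cached
  // relation left behind by the earlier definition.
  sql("CREATE TABLE dropTableTest1 (ID int, salary String) STORED BY 'org.apache.carbondata.format'")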


[2/2] incubator-carbondata git commit: [CARBONDATA-192] Invalidating the table from hive context while dropping the table. This closes #109

Posted by ra...@apache.org.
[CARBONDATA-192] Invalidating the table from hive context while dropping the table. This closes #109


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/4275277b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/4275277b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/4275277b

Branch: refs/heads/master
Commit: 4275277b9655ce94a9b26957b98b65491be1d1e4
Parents: ac5ddda 1a08f6c
Author: ravipesala <ra...@gmail.com>
Authored: Tue Aug 30 20:47:45 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Aug 30 20:47:45 2016 +0530

----------------------------------------------------------------------
 .../execution/command/carbonTableSchema.scala   |  12 +-
 .../spark/sql/hive/CarbonHiveMetadataUtil.scala |  58 +++++++++
 .../spark/sql/hive/CarbonMetastoreCatalog.scala |   2 +-
 .../deleteTable/TestDeleteTableNewDDL.scala     | 123 ++++++++++++++++++-
 4 files changed, 183 insertions(+), 12 deletions(-)
----------------------------------------------------------------------