Posted to commits@carbondata.apache.org by ra...@apache.org on 2016/11/07 11:51:58 UTC

[5/5] incubator-carbondata git commit: Support the DROP DATABASE CASCADE command in Carbon.

Support the DROP DATABASE CASCADE command in Carbon.


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/e0261831
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/e0261831
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/e0261831

Branch: refs/heads/branch-0.2
Commit: e0261831db61e6631cb0f38ae7e1761f0278a3b7
Parents: bc3f954
Author: nareshpr <pr...@gmail.com>
Authored: Thu Nov 3 14:53:03 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Mon Nov 7 17:17:51 2016 +0530

----------------------------------------------------------------------
 .../spark/sql/CarbonCatalystOperators.scala     | 15 ++++++
 .../org/apache/spark/sql/CarbonSqlParser.scala  | 46 +++++++++++++---
 .../spark/sql/hive/CarbonMetastoreCatalog.scala | 16 +++++-
 .../spark/sql/hive/CarbonStrategies.scala       |  9 ++++
 .../execution/command/CarbonHiveCommands.scala  | 55 ++++++++++++++++++++
 .../deleteTable/TestDeleteTableNewDDL.scala     | 19 ++++---
 6 files changed, 145 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
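For reviewers, the user-facing behaviour this patch enables looks as follows. A minimal sketch, assuming a SparkContext sc and a writable store path (both illustrative); before this change the parser rejected CASCADE outright with MalformedCarbonCommandException:

    import org.apache.spark.SparkContext
    import org.apache.spark.sql.CarbonContext

    val cc = new CarbonContext(sc, "/var/carbon/store")  // illustrative store path
    cc.sql("CREATE DATABASE testdb")
    cc.sql("USE testdb")
    cc.sql("CREATE TABLE t1 (id Int, name String) " +
           "STORED BY 'org.apache.carbondata.format'")
    // Drops every Carbon table in testdb, then removes the database
    // directory from the store.
    cc.sql("DROP DATABASE testdb CASCADE")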


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e0261831/integration/spark/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala
index 3729d3c..cece5e6 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala
@@ -90,3 +90,18 @@ abstract class CarbonProfile(attributes: Seq[Attribute]) extends Serializable {
 case class IncludeProfile(attributes: Seq[Attribute]) extends CarbonProfile(attributes)
 
 case class ExcludeProfile(attributes: Seq[Attribute]) extends CarbonProfile(attributes)
+
+case class CreateDatabase(dbName: String, sql: String) extends LogicalPlan with Command {
+  override def children: Seq[LogicalPlan] = Seq.empty
+  override def output: Seq[AttributeReference] = {
+    Seq()
+  }
+}
+
+case class DropDatabase(dbName: String, isCascade: Boolean, sql: String)
+    extends LogicalPlan with Command {
+  override def children: Seq[LogicalPlan] = Seq.empty
+  override def output: Seq[AttributeReference] = {
+    Seq()
+  }
+}
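Both new nodes are leaf commands: children is empty, and output is empty because neither statement returns rows. They carry the raw SQL text so the planner can replay the statement through HiveNativeCommand (see the CarbonStrategies change below) while still dispatching on the extracted database name and cascade flag.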

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e0261831/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
index da14b3e..c287617 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
@@ -199,14 +199,50 @@ class CarbonSqlParser()
 
   override protected lazy val start: Parser[LogicalPlan] = explainPlan | startCommand
 
-  protected lazy val startCommand: Parser[LogicalPlan] =
-    dropDatabaseCascade | loadManagement | describeTable | showLoads | alterTable | createTable
+  protected lazy val startCommand: Parser[LogicalPlan] = createDatabase | dropDatabase |
+    loadManagement | describeTable | showLoads | alterTable | createTable
 
   protected lazy val loadManagement: Parser[LogicalPlan] = deleteLoadsByID | deleteLoadsByLoadDate |
     cleanFiles | loadDataNew
 
   protected val escapedIdentifier = "`([^`]+)`".r
 
+  protected lazy val createDatabase: Parser[LogicalPlan] =
+    CREATE ~> (DATABASE | SCHEMA) ~> restInput ^^ {
+      case statement =>
+        val createDbSql = "CREATE DATABASE " + statement
+        var dbName = ""
+        // get the AST node for the CREATE DATABASE command
+        val node = HiveQlWrapper.getAst(createDbSql)
+        node match {
+          // extract the database name
+          case Token("TOK_CREATEDATABASE", children) =>
+            dbName = BaseSemanticAnalyzer.unescapeIdentifier(children(0).getText)
+        }
+        CreateDatabase(dbName, createDbSql)
+    }
+
+  protected lazy val dropDatabase: Parser[LogicalPlan] =
+    DROP ~> (DATABASE | SCHEMA) ~> restInput ^^ {
+      case statement =>
+        val dropDbSql = "DROP DATABASE " + statement
+        var dbName = ""
+        var isCascade = false
+        // get the AST node for the DROP DATABASE command
+        val node = HiveQlWrapper.getAst(dropDbSql)
+        node match {
+          case Token("TOK_DROPDATABASE", children) =>
+            dbName = BaseSemanticAnalyzer.unescapeIdentifier(children(0).getText)
+            // check whether this is a cascade drop
+            children.collect {
+              case t@Token("TOK_CASCADE", _) =>
+                isCascade = true
+              case _ => // ignore unsupported tokens
+            }
+        }
+        DropDatabase(dbName, isCascade, dropDbSql)
+    }
+
   private def reorderDimensions(dims: Seq[Field]): Seq[Field] = {
     var complexDimensions: Seq[Field] = Seq()
     var dimensions: Seq[Field] = Seq()
@@ -1228,10 +1264,4 @@ class CarbonSqlParser()
           case _ => ExplainCommand(OneRowRelation)
       }
     }
-
-  protected lazy val dropDatabaseCascade: Parser[LogicalPlan] =
-    DROP ~> (DATABASE|SCHEMA) ~> opt(IF ~> EXISTS) ~> ident ~> CASCADE <~ opt(";") ^^ {
-      case cascade => throw new MalformedCarbonCommandException(
-          "Unsupported cascade operation in drop database/schema command")
-    }
 }
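A hedged sketch of how the new rules can be exercised in isolation, assuming CarbonSqlParser exposes the standard parse(String) entry point inherited from AbstractSparkSQLParser; the database name is illustrative:

    import org.apache.spark.sql.{CarbonSqlParser, DropDatabase}

    val parser = new CarbonSqlParser()
    parser.parse("DROP DATABASE testdb CASCADE") match {
      case DropDatabase(dbName, isCascade, _) =>
        assert(dbName == "testdb" && isCascade)
      case other =>
        sys.error("unexpected plan: " + other)
    }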

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e0261831/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
index 7b3fea9..75dc235 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
@@ -575,6 +575,21 @@ class CarbonMetastoreCatalog(hiveContext: HiveContext, val storePath: String,
     (databaseName, tableName, dataPath, schema, partitioner, tableCreationTime)
   }
 
+  def createDatabaseDirectory(dbName: String) {
+    val databasePath = storePath + File.separator + dbName
+    val fileType = FileFactory.getFileType(databasePath)
+    FileFactory.mkdirs(databasePath, fileType)
+  }
+
+  def dropDatabaseDirectory(dbName: String) {
+    val databasePath = storePath + File.separator + dbName
+    val fileType = FileFactory.getFileType(databasePath)
+    if (FileFactory.isFileExist(databasePath, fileType)) {
+      val dbPath = FileFactory.getCarbonFile(databasePath, fileType)
+      CarbonUtil.deleteFoldersAndFiles(dbPath)
+    }
+  }
+
 }
 
 
@@ -653,5 +668,4 @@ object CarbonMetastoreTypes extends RegexParsers {
       case TimestampType => "timestamp"
     }
   }
-
 }
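The two new helpers only manage the per-database directory under the store path; everything else is left to Hive. A minimal sketch of the layout they maintain, with illustrative paths:

    import java.io.File

    val storePath = "/var/carbon/store"                      // illustrative
    val databasePath = storePath + File.separator + "testdb"
    // createDatabaseDirectory mkdirs databasePath via FileFactory;
    // dropDatabaseDirectory deletes it recursively with
    // CarbonUtil.deleteFoldersAndFiles, but only if it exists, so dropping
    // a never-materialised database is a no-op on the file system.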

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e0261831/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
index 0b4d020..03ddeff 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
@@ -32,6 +32,7 @@ import org.apache.spark.sql.execution.{ExecutedCommand, Filter, Project, SparkPl
 import org.apache.spark.sql.execution.command._
 import org.apache.spark.sql.execution.datasources.{DescribeCommand => LogicalDescribeCommand, LogicalRelation}
 import org.apache.spark.sql.hive.execution.{DropTable, HiveNativeCommand}
+import org.apache.spark.sql.hive.execution.command._
 import org.apache.spark.sql.optimizer.{CarbonAliasDecoderRelation, CarbonDecoderRelation}
 import org.apache.spark.sql.types.IntegerType
 
@@ -251,6 +252,14 @@ class CarbonStrategies(sqlContext: SQLContext) extends QueryPlanner[SparkPlan] {
         } else {
           ExecutedCommand(HiveNativeCommand(altertablemodel.alterSql)) :: Nil
         }
+      case CreateDatabase(dbName, sql) =>
+        ExecutedCommand(CreateDatabaseCommand(dbName, HiveNativeCommand(sql))) :: Nil
+      case DropDatabase(dbName, isCascade, sql) =>
+        if (isCascade) {
+          ExecutedCommand(DropDatabaseCascadeCommand(dbName, HiveNativeCommand(sql))) :: Nil
+        } else {
+          ExecutedCommand(DropDatabaseCommand(dbName, HiveNativeCommand(sql))) :: Nil
+        }
       case d: HiveNativeCommand =>
         try {
           val resolvedTable = sqlContext.executePlan(CarbonHiveSyntax.parse(d.sql)).optimizedPlan
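The new plan nodes map one-to-one onto runnable commands. In every case the raw SQL is replayed through HiveNativeCommand, so Hive's own validation (IF EXISTS, the default RESTRICT behaviour, and so on) still applies; the cascade flag only selects which Carbon-side cleanup wrapper runs after the native command.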

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e0261831/integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
new file mode 100644
index 0000000..e81e0b5
--- /dev/null
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive.execution.command
+
+import org.apache.spark.sql._
+import org.apache.spark.sql.execution.RunnableCommand
+import org.apache.spark.sql.execution.command.DropTableCommand
+import org.apache.spark.sql.hive.execution.HiveNativeCommand
+
+private[hive] case class CreateDatabaseCommand(dbName: String,
+    command: HiveNativeCommand) extends RunnableCommand {
+  def run(sqlContext: SQLContext): Seq[Row] = {
+    val rows = command.run(sqlContext)
+    CarbonEnv.getInstance(sqlContext).carbonCatalog.createDatabaseDirectory(dbName)
+    rows
+  }
+}
+
+private[hive] case class DropDatabaseCommand(dbName: String,
+    command: HiveNativeCommand) extends RunnableCommand {
+  def run(sqlContext: SQLContext): Seq[Row] = {
+    val rows = command.run(sqlContext)
+    CarbonEnv.getInstance(sqlContext).carbonCatalog.dropDatabaseDirectory(dbName)
+    rows
+  }
+}
+
+private[hive] case class DropDatabaseCascadeCommand(dbName: String,
+    command: HiveNativeCommand) extends RunnableCommand {
+  def run(sqlContext: SQLContext): Seq[Row] = {
+    val tablesInDB = CarbonEnv.getInstance(sqlContext).carbonCatalog
+        .getTables(Some(dbName))(sqlContext).map(x => x._1)
+    val rows = command.run(sqlContext)
+    tablesInDB.foreach{tableName =>
+      DropTableCommand(true, Some(dbName), tableName).run(sqlContext)
+    }
+    CarbonEnv.getInstance(sqlContext).carbonCatalog.dropDatabaseDirectory(dbName)
+    rows
+  }
+}
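One ordering detail worth noting in DropDatabaseCascadeCommand: the list of Carbon tables is snapshotted before the native Hive command runs, because once Hive has dropped the database the catalog no longer reports its tables. Each table is then cleaned up via DropTableCommand, and finally the database directory is removed from the store.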

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e0261831/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
index adec571..cdfe2e6 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
@@ -50,20 +50,27 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
   // normal deletion case
   test("drop table Test with new DDL") {
     sql("drop table table1")
-
   }
   
   test("test drop database cascade command") {
     sql("create database testdb")
+    sql("use testdb")
+    sql("CREATE TABLE IF NOT EXISTS testtable(empno Int, empname string, utilization Int,salary Int)"
+        + " STORED BY 'org.apache.carbondata.format' ")
+    try {
+      sql("drop database testdb")
+      assert(false)
+    } catch {
+      case e : Exception => // expected: plain drop fails on a non-empty database
+    }
+    sql("drop database testdb cascade")
     try {
-      sql("drop database testdb cascade")
+      sql("use testdb")
       assert(false)
     } catch {
-      case e : MalformedCarbonCommandException => {
-        assert(e.getMessage.equals("Unsupported cascade operation in drop database/schema command"))
-      }
+      case e : Exception => // expected: testdb no longer exists
     }
-    sql("drop database testdb")
+    sql("use default")
   }
 
   // deletion case with if exists