You are viewing a plain text version of this content. The canonical link for it is here. [NOTE: the canonical hyperlink was lost in plain-text extraction of this archive page.]
Posted to commits@spark.apache.org by rx...@apache.org on 2016/01/29 07:20:57 UTC
spark git commit: [SPARK-12968][SQL] Implement command to set current database
Repository: spark
Updated Branches:
refs/heads/master b9dfdcc63 -> 66449b8dc
[SPARK-12968][SQL] Implement command to set current database
JIRA: https://issues.apache.org/jira/browse/SPARK-12968
Implement command to set current database.
Author: Liang-Chi Hsieh <vi...@gmail.com>
Author: Liang-Chi Hsieh <vi...@appier.com>
Closes #10916 from viirya/ddl-use-database.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/66449b8d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/66449b8d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/66449b8d
Branch: refs/heads/master
Commit: 66449b8dcdbc3dca126c34b42c4d0419c7648696
Parents: b9dfdcc
Author: Liang-Chi Hsieh <vi...@gmail.com>
Authored: Thu Jan 28 22:20:52 2016 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Thu Jan 28 22:20:52 2016 -0800
----------------------------------------------------------------------
.../spark/sql/catalyst/analysis/Catalog.scala | 4 ++++
.../org/apache/spark/sql/execution/SparkQl.scala | 3 +++
.../org/apache/spark/sql/execution/commands.scala | 10 ++++++++++
.../spark/sql/hive/thriftserver/CliSuite.scala | 2 +-
.../spark/sql/hive/HiveMetastoreCatalog.scala | 4 ++++
.../scala/org/apache/spark/sql/hive/HiveQl.scala | 2 --
.../spark/sql/hive/client/ClientInterface.scala | 3 +++
.../spark/sql/hive/client/ClientWrapper.scala | 9 +++++++++
.../spark/sql/hive/execution/HiveQuerySuite.scala | 16 ++++++++++++++++
9 files changed, 50 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
index a8f89ce..f2f9ec5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
@@ -46,6 +46,10 @@ trait Catalog {
def lookupRelation(tableIdent: TableIdentifier, alias: Option[String] = None): LogicalPlan
+ def setCurrentDatabase(databaseName: String): Unit = {
+ throw new UnsupportedOperationException
+ }
+
/**
* Returns tuples of (tableName, isTemporary) for all tables in the given database.
* isTemporary is a Boolean value indicates if a table is a temporary or not.
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala
index f605530..a5bd8ee 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala
@@ -55,6 +55,9 @@ private[sql] class SparkQl(conf: ParserConf = SimpleParserConf()) extends Cataly
getClauses(Seq("TOK_QUERY", "FORMATTED", "EXTENDED"), explainArgs)
ExplainCommand(nodeToPlan(query), extended = extended.isDefined)
+ case Token("TOK_SWITCHDATABASE", Token(database, Nil) :: Nil) =>
+ SetDatabaseCommand(cleanIdentifier(database))
+
case Token("TOK_DESCTABLE", describeArgs) =>
// Reference: https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL
val Some(tableType) :: formatted :: extended :: pretty :: Nil =
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index 3cfa3df..703e464 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -408,3 +408,13 @@ case class DescribeFunction(
}
}
}
+
+case class SetDatabaseCommand(databaseName: String) extends RunnableCommand {
+
+ override def run(sqlContext: SQLContext): Seq[Row] = {
+ sqlContext.catalog.setCurrentDatabase(databaseName)
+ Seq.empty[Row]
+ }
+
+ override val output: Seq[Attribute] = Seq.empty
+}
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index ab31d45..72da266 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -183,7 +183,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
"CREATE DATABASE hive_test_db;"
-> "OK",
"USE hive_test_db;"
- -> "OK",
+ -> "",
"CREATE TABLE hive_test(key INT, val STRING);"
-> "OK",
"SHOW TABLES;"
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index a9c0e9a..848aa4e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -711,6 +711,10 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
}
override def unregisterAllTables(): Unit = {}
+
+ override def setCurrentDatabase(databaseName: String): Unit = {
+ client.setCurrentDatabase(databaseName)
+ }
}
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 22841ed..752c037 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -155,8 +155,6 @@ private[hive] class HiveQl(conf: ParserConf) extends SparkQl(conf) with Logging
"TOK_SHOWLOCKS",
"TOK_SHOWPARTITIONS",
- "TOK_SWITCHDATABASE",
-
"TOK_UNLOCKTABLE"
)
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
index 9d9a55e..4eec3fe 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
@@ -109,6 +109,9 @@ private[hive] trait ClientInterface {
/** Returns the name of the active database. */
def currentDatabase: String
+ /** Sets the name of current database. */
+ def setCurrentDatabase(databaseName: String): Unit
+
/** Returns the metadata for specified database, throwing an exception if it doesn't exist */
def getDatabase(name: String): HiveDatabase = {
getDatabaseOption(name).getOrElse(throw new NoSuchDatabaseException)
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
index ce7a305..5307e92 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.shims.{HadoopShims, ShimLoader}
import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.{Logging, SparkConf, SparkException}
+import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.execution.QueryExecutionException
import org.apache.spark.util.{CircularBuffer, Utils}
@@ -229,6 +230,14 @@ private[hive] class ClientWrapper(
state.getCurrentDatabase
}
+ override def setCurrentDatabase(databaseName: String): Unit = withHiveState {
+ if (getDatabaseOption(databaseName).isDefined) {
+ state.setCurrentDatabase(databaseName)
+ } else {
+ throw new NoSuchDatabaseException
+ }
+ }
+
override def createDatabase(database: HiveDatabase): Unit = withHiveState {
client.createDatabase(
new Database(
http://git-wip-us.apache.org/repos/asf/spark/blob/66449b8d/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 4659d74..9632d27 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -28,6 +28,7 @@ import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkException, SparkFiles}
import org.apache.spark.sql.{AnalysisException, DataFrame, Row}
+import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
import org.apache.spark.sql.catalyst.expressions.Cast
import org.apache.spark.sql.catalyst.plans.logical.Project
import org.apache.spark.sql.execution.joins.BroadcastNestedLoopJoin
@@ -1262,6 +1263,21 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
}
+ test("use database") {
+ val currentDatabase = sql("select current_database()").first().getString(0)
+
+ sql("CREATE DATABASE hive_test_db")
+ sql("USE hive_test_db")
+ assert("hive_test_db" == sql("select current_database()").first().getString(0))
+
+ intercept[NoSuchDatabaseException] {
+ sql("USE not_existing_db")
+ }
+
+ sql(s"USE $currentDatabase")
+ assert(currentDatabase == sql("select current_database()").first().getString(0))
+ }
+
test("lookup hive UDF in another thread") {
val e = intercept[AnalysisException] {
range(1).selectExpr("not_a_udf()")
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org