Posted to commits@carbondata.apache.org by ja...@apache.org on 2017/11/14 15:21:46 UTC
carbondata git commit: [CARBONDATA-1579][PREAGG][DATAMAP] Support DataMap show
Repository: carbondata
Updated Branches:
refs/heads/master 17d07319c -> d062ab41d
[CARBONDATA-1579][PREAGG][DATAMAP] Support DataMap show
Added support for showing the datamaps created on a table.
SHOW DATAMAP ON TABLE test
The above command shows all datamaps created on the table named test.
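As an illustrative sketch only (the rows below are hypothetical and merely mirror the new test cases; the column names come from the command's output schema in the patch):

SHOW DATAMAP ON TABLE datamapshowtest

DataMapName   ClassName      Associated Table
datamap1      preaggregate   default.datamapshowtest_datamap1
datamap2      new.class      (NA)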
This closes #1490
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d062ab41
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d062ab41
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d062ab41
Branch: refs/heads/master
Commit: d062ab41dfd8b7708b43b5ed74d12591029ff4e5
Parents: 17d0731
Author: ravipesala <ra...@gmail.com>
Authored: Tue Nov 14 17:33:58 2017 +0530
Committer: Jacky Li <ja...@qq.com>
Committed: Tue Nov 14 23:21:35 2017 +0800
----------------------------------------------------------------------
.../testsuite/datamap/TestDataMapCommand.scala | 37 +++++++++++
.../datamap/CarbonDataMapShowCommand.scala | 67 ++++++++++++++++++++
.../sql/parser/CarbonSpark2SqlParser.scala | 14 +++-
3 files changed, 116 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/carbondata/blob/d062ab41/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala
index b7121b7..5db0a0f 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala
@@ -26,6 +26,7 @@ class TestDataMapCommand extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
sql("drop table if exists datamaptest")
+ sql("drop table if exists datamapshowtest")
sql("create table datamaptest (a string, b string, c string) stored by 'carbondata'")
}
@@ -98,8 +99,44 @@ class TestDataMapCommand extends QueryTest with BeforeAndAfterAll {
assert(dataMapSchemaList.size() == 3)
}
+ test("test show datamap without preaggregate") {
+ sql("drop table if exists datamapshowtest")
+ sql("create table datamapshowtest (a string, b string, c string) stored by 'carbondata'")
+ sql("create datamap datamap1 on table datamapshowtest using 'new.class' dmproperties('key'='value')")
+ sql("create datamap datamap2 on table datamapshowtest using 'new.class' dmproperties('key'='value')")
+ checkExistence(sql("show datamap on table datamapshowtest"), true, "datamap1", "datamap2", "(NA)", "new.class")
+ }
+
+ test("test show datamap with preaggregate") {
+ sql("drop table if exists datamapshowtest")
+ sql("create table datamapshowtest (a string, b string, c string) stored by 'carbondata'")
+ sql("create datamap datamap1 on table datamapshowtest using 'preaggregate' as select count(a) from datamapshowtest")
+ sql("create datamap datamap2 on table datamapshowtest using 'new.class' dmproperties('key'='value')")
+ val frame = sql("show datamap on table datamapshowtest")
+ assert(frame.collect().length == 2)
+ checkExistence(frame, true, "datamap1", "datamap2", "(NA)", "new.class", "default.datamapshowtest_datamap1")
+ }
+
+ test("test show datamap with no datamap") {
+ sql("drop table if exists datamapshowtest")
+ sql("create table datamapshowtest (a string, b string, c string) stored by 'carbondata'")
+ assert(sql("show datamap on table datamapshowtest").collect().length == 0)
+ }
+
+ test("test show datamap after dropping datamap") {
+ sql("drop table if exists datamapshowtest")
+ sql("create table datamapshowtest (a string, b string, c string) stored by 'carbondata'")
+ sql("create datamap datamap1 on table datamapshowtest using 'preaggregate' as select count(a) from datamapshowtest")
+ sql("create datamap datamap2 on table datamapshowtest using 'new.class' dmproperties('key'='value')")
+ sql("drop datamap datamap1 on table datamapshowtest")
+ val frame = sql("show datamap on table datamapshowtest")
+ assert(frame.collect().length == 1)
+ checkExistence(frame, true, "datamap2", "(NA)", "new.class")
+ }
+
override def afterAll {
sql("drop table if exists datamaptest")
+ sql("drop table if exists datamapshowtest")
}
}
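checkExistence is the QueryTest helper these tests rely on to assert that the given keywords appear in the query result. As a rough, hypothetical sketch of the equivalent check (not the actual helper), using only standard Spark DataFrame APIs:

// Hypothetical equivalent of the positive checkExistence assertion used above:
// collect the result, flatten it to text, and require every keyword to appear.
def assertContains(df: org.apache.spark.sql.DataFrame, keywords: String*): Unit = {
  val text = df.collect().map(_.mkString(" ")).mkString(" ")
  keywords.foreach(k => assert(text.contains(k), s"keyword '$k' not found in result"))
}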
http://git-wip-us.apache.org/repos/asf/carbondata/blob/d062ab41/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala
new file mode 100644
index 0000000..822455c
--- /dev/null
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.datamap
+
+import scala.collection.JavaConverters._
+
+import org.apache.spark.sql.{CarbonEnv, Row, SparkSession}
+import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
+import org.apache.spark.sql.execution.command.{Checker, DataProcessCommand, RunnableCommand}
+import org.apache.spark.sql.hive.CarbonRelation
+import org.apache.spark.sql.types.StringType
+
+/**
+ * Shows the datamaps created on the table.
+ * @param databaseNameOp optional database name of the table
+ * @param tableName name of the table whose datamaps are listed
+ */
+case class CarbonDataMapShowCommand(
+ databaseNameOp: Option[String],
+ tableName: String)
+ extends RunnableCommand with DataProcessCommand {
+
+ override def output: Seq[Attribute] = {
+ Seq(AttributeReference("DataMapName", StringType, nullable = false)(),
+ AttributeReference("ClassName", StringType, nullable = false)(),
+ AttributeReference("Associated Table", StringType, nullable = false)())
+ }
+
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ processData(sparkSession)
+ }
+
+ override def processData(sparkSession: SparkSession): Seq[Row] = {
+ Checker.validateTableExists(databaseNameOp, tableName, sparkSession)
+ val carbonTable = CarbonEnv.getInstance(sparkSession).carbonMetastore.
+ lookupRelation(databaseNameOp, tableName)(sparkSession).asInstanceOf[CarbonRelation].
+ tableMeta.carbonTable
+ val schemaList = carbonTable.getTableInfo.getDataMapSchemaList
+ if (schemaList != null && schemaList.size() > 0) {
+ schemaList.asScala.map { s =>
+ var table = "(NA)"
+ val relationIdentifier = s.getRelationIdentifier
+ if (relationIdentifier != null) {
+ table = relationIdentifier.getDatabaseName + "." + relationIdentifier.getTableName
+ }
+ Row(s.getDataMapName, s.getClassName, table)
+ }
+ } else {
+ Seq.empty
+ }
+ }
+}
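A brief usage sketch, not part of the patch: assuming a Carbon-enabled SparkSession named spark (as the test suite above creates), the command is reached through the SQL front end, and each returned Row carries the three attributes declared in output:

// Illustrative only; the table and datamap names follow the tests above.
val rows = spark.sql("SHOW DATAMAP ON TABLE datamapshowtest").collect()
rows.foreach { r =>
  // Columns follow CarbonDataMapShowCommand.output:
  // DataMapName, ClassName, Associated Table ("(NA)" when there is no child table)
  println(s"${r.getString(0)}  ${r.getString(1)}  ${r.getString(2)}")
}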
http://git-wip-us.apache.org/repos/asf/carbondata/blob/d062ab41/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
index be89248..6df5a04 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.CarbonTableIdentifierImplicit._
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.command._
-import org.apache.spark.sql.execution.command.datamap.{CarbonCreateDataMapCommand, CarbonDropDataMapCommand}
+import org.apache.spark.sql.execution.command.datamap.{CarbonCreateDataMapCommand, CarbonDataMapShowCommand, CarbonDropDataMapCommand}
import org.apache.spark.sql.execution.command.management.{AlterTableCompactionCommand, CleanFilesCommand, DeleteLoadByIdCommand, DeleteLoadByLoadDateCommand, LoadTableCommand}
import org.apache.spark.sql.execution.command.partition.{AlterTableDropCarbonPartitionCommand, AlterTableSplitCarbonPartitionCommand}
import org.apache.spark.sql.execution.command.schema.{CarbonAlterTableAddColumnCommand, CarbonAlterTableDataTypeChangeCommand, CarbonAlterTableDropColumnCommand}
@@ -80,7 +80,7 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
alterAddPartition | alterSplitPartition | alterDropPartition
protected lazy val datamapManagement: Parser[LogicalPlan] =
- createDataMap | dropDataMap
+ createDataMap | dropDataMap | showDataMap
protected lazy val alterAddPartition: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ (ADD ~> PARTITION ~>
@@ -152,6 +152,16 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
CarbonDropDataMapCommand(dmname, ifexists.isDefined, dbName, tableName)
}
+ /**
+ * Parses the SHOW DATAMAP statement, which lists the datamaps created on a table:
+ * SHOW DATAMAP ON TABLE tableName
+ */
+ protected lazy val showDataMap: Parser[LogicalPlan] =
+ SHOW ~> DATAMAP ~> ON ~> TABLE ~> (ident <~ ".").? ~ ident <~ opt(";") ^^ {
+ case databaseName ~ tableName =>
+ CarbonDataMapShowCommand(convertDbNameToLowerCase(databaseName), tableName.toLowerCase())
+ }
+
protected lazy val deleteRecords: Parser[LogicalPlan] =
(DELETE ~> FROM ~> table) ~ restInput.? <~ opt(";") ^^ {
case table ~ rest =>