You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ja...@apache.org on 2018/03/31 06:27:10 UTC

carbondata git commit: [CARBONDATA-2296] Fix create datamap command with out on table syntax and correct the target location of test framework

Repository: carbondata
Updated Branches:
  refs/heads/master e8da88002 -> cd509d5db


[CARBONDATA-2296] Fix create datamap command with out on table syntax and correct the target location of test framework

Problem
1 The create datamap command fails if the user does not specify the ON TABLE clause.
2 The test framework tries to create the metastore and store location under integration/spark-common, but when tests run from other modules (such as datamap) it creates the target location in the wrong place.
Solution
Check for a null parent table when it is not present, and always use the current module's target folder as the store path location when the module is not under the integration directory.

This closes #2122


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/cd509d5d
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/cd509d5d
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/cd509d5d

Branch: refs/heads/master
Commit: cd509d5dbee1311ce14374b79b7c30d400826702
Parents: e8da880
Author: ravipesala <ra...@gmail.com>
Authored: Sat Mar 31 08:12:44 2018 +0530
Committer: Jacky Li <ja...@qq.com>
Committed: Sat Mar 31 14:26:51 2018 +0800

----------------------------------------------------------------------
 .../core/datamap/IndexDataMapProvider.java      |  4 ++
 .../testsuite/datamap/TestDataMapStatus.scala   | 44 ++++++++++++++++++++
 .../spark/sql/test/TestQueryExecutor.scala      | 42 +++++++++++++++----
 .../datamap/CarbonCreateDataMapCommand.scala    |  5 ++-
 4 files changed, 85 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/cd509d5d/core/src/main/java/org/apache/carbondata/core/datamap/IndexDataMapProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/IndexDataMapProvider.java b/core/src/main/java/org/apache/carbondata/core/datamap/IndexDataMapProvider.java
index 04cec70..02ff1a1 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/IndexDataMapProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/IndexDataMapProvider.java
@@ -41,6 +41,10 @@ public class IndexDataMapProvider implements DataMapProvider {
   @Override
   public void initMeta(CarbonTable mainTable, DataMapSchema dataMapSchema, String ctasSqlStatement)
       throws MalformedDataMapCommandException, IOException {
+    if (mainTable == null) {
+      throw new MalformedDataMapCommandException(
+          "Parent table is required to create index datamap");
+    }
     ArrayList<RelationIdentifier> relationIdentifiers = new ArrayList<>();
     dataMapSchema.setParentTables(relationIdentifiers);
     relationIdentifiers.add(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cd509d5d/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
index 17eefc4..32c10ef 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
@@ -24,6 +24,7 @@ import scala.collection.JavaConverters._
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datamap.dev.DataMapWriter
 import org.apache.carbondata.core.datamap.dev.cgdatamap.{CoarseGrainDataMap, CoarseGrainDataMapFactory}
@@ -115,6 +116,49 @@ class TestDataMapStatus extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS datamapstatustest2")
   }
 
+  test("datamap create without on table test") {
+    sql("DROP TABLE IF EXISTS datamapstatustest3")
+    sql(
+      """
+        | CREATE TABLE datamapstatustest3(id int, name string, city string, age int)
+        | STORED BY 'org.apache.carbondata.format'
+      """.stripMargin)
+    intercept[MalformedDataMapCommandException] {
+      sql(
+        s"""create datamap statusdatamap3 using '${
+          classOf[TestDataMap]
+            .getName
+        }' as select id,sum(age) from datamapstatustest3 group by id""".stripMargin)
+    }
+
+    sql(
+      s"""create datamap statusdatamap3 on table datamapstatustest3 using '${
+        classOf[TestDataMap]
+          .getName
+      }' as select id,sum(age) from datamapstatustest3 group by id""".stripMargin)
+
+    var details = DataMapStatusManager.readDataMapStatusDetails()
+
+    assert(details.length == 1)
+
+    assert(details.exists(p => p.getDataMapName.equals("statusdatamap3") && p.getStatus == DataMapStatus.DISABLED))
+
+    sql(s"LOAD DATA LOCAL INPATH '$testData' into table datamapstatustest3")
+    details = DataMapStatusManager.readDataMapStatusDetails()
+    assert(details.length == 1)
+    assert(details.exists(p => p.getDataMapName.equals("statusdatamap3") && p.getStatus == DataMapStatus.DISABLED))
+
+    sql(s"refresh datamap statusdatamap3")
+
+    details = DataMapStatusManager.readDataMapStatusDetails()
+    assert(details.length == 1)
+    assert(details.exists(p => p.getDataMapName.equals("statusdatamap3") && p.getStatus == DataMapStatus.ENABLED))
+
+    checkExistence(sql(s"show datamap"), true, "statusdatamap3")
+
+    sql("DROP TABLE IF EXISTS datamapstatustest3")
+  }
+
   override def afterAll {
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_HIVE_SCHEMA_META_STORE,
       CarbonCommonConstants.ENABLE_HIVE_SCHEMA_META_STORE_DEFAULT)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cd509d5d/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
index 34f901f..daed985 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
@@ -46,12 +46,38 @@ object TestQueryExecutor {
 
   private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
 
-  val projectPath = new File(this.getClass.getResource("/").getPath + "../../../..")
-    .getCanonicalPath.replaceAll("\\\\", "/")
+  val (projectPath, isIntegrationModule, localTarget) = {
+    val path = new File(this.getClass.getResource("/").getPath)
+      .getCanonicalPath.replaceAll("\\\\", "/")
+    // Check whether it is integration module
+    val isIntegrationModule = path.indexOf("/integration/") > -1
+    // Get the local target folder path
+    val targetPath = path.substring(0, path.lastIndexOf("/target/") + 8)
+    // Get the relative project path
+    val projectPathLocal = if (isIntegrationModule) {
+      path.substring(0, path.indexOf("/integration/"))
+    } else if (path.indexOf("/datamap/") > -1) {
+      path.substring(0, path.indexOf("/datamap/"))
+    } else if (path.indexOf("/tools/") > -1) {
+      path.substring(0, path.indexOf("/tools/"))
+    } else if (path.indexOf("/examples/") > -1) {
+      path.substring(0, path.indexOf("/examples/"))
+    } else {
+      path
+    }
+    (projectPathLocal, isIntegrationModule, targetPath)
+  }
   LOGGER.info(s"project path: $projectPath")
   val integrationPath = s"$projectPath/integration"
-  val metastoredb = s"$integrationPath/spark-common/target"
-  val location = s"$integrationPath/spark-common/target/dbpath"
+  val target = if (isIntegrationModule) {
+    // If integration module , always point to spark-common/target location
+    s"$integrationPath/spark-common/target"
+  } else {
+    // Otherwise point to respective target folder location
+    localTarget
+  }
+  val metastoredb = target
+  val location = s"$target/dbpath"
   val masterUrl = {
     val property = System.getProperty("spark.master.url")
     if (property == null) {
@@ -86,7 +112,7 @@ object TestQueryExecutor {
   } else {
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOCK_TYPE,
       CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL)
-    s"$integrationPath/spark-common/target/store"
+    s"$target/store"
   }
   val warehouse = if (hdfsUrl.startsWith("hdfs://")) {
     val carbonFile = FileFactory.
@@ -94,13 +120,13 @@ object TestQueryExecutor {
     FileFactory.deleteAllCarbonFilesOfDir(carbonFile)
     s"$hdfsUrl/warehouse_" + System.nanoTime()
   } else {
-    s"$integrationPath/spark-common/target/warehouse"
+    s"$target/warehouse"
   }
 
   val badStoreLocation = if (hdfsUrl.startsWith("hdfs://")) {
        s"$hdfsUrl/bad_store_" + System.nanoTime()
       } else {
-        s"$integrationPath/spark-common/target/bad_store"
+        s"$target/bad_store"
       }
     createDirectory(badStoreLocation)
 
@@ -109,7 +135,7 @@ object TestQueryExecutor {
     FileFactory.mkdirs(p, FileFactory.getFileType(p))
     p
   } else {
-    val p = s"$integrationPath/spark-common/target/hiveresultpath"
+    val p = s"$target/hiveresultpath"
     new File(p).mkdirs()
     p
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cd509d5d/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
index 91ff4d1..a46d514 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
@@ -55,7 +55,7 @@ case class CarbonCreateDataMapCommand(
         CarbonEnv.getCarbonTable(table.database, table.table)(sparkSession)
       case _ => null
     }
-    if (mainTable.getDataMapSchema(dataMapName) != null) {
+    if (mainTable != null && mainTable.getDataMapSchema(dataMapName) != null) {
       if (!ifNotExistsSet) {
         throw new MalformedDataMapCommandException(s"DataMap name '$dataMapName' already exist")
       } else {
@@ -64,7 +64,8 @@ case class CarbonCreateDataMapCommand(
     }
 
     dataMapSchema = new DataMapSchema(dataMapName, dmClassName)
-    if (mainTable.isStreamingTable &&
+    if (mainTable != null &&
+        mainTable.isStreamingTable &&
         !(dataMapSchema.getProviderName.equalsIgnoreCase(DataMapClassProvider.PREAGGREGATE.toString)
           || dataMapSchema.getProviderName
             .equalsIgnoreCase(DataMapClassProvider.TIMESERIES.toString))) {