Posted to commits@carbondata.apache.org by ra...@apache.org on 2017/11/13 11:59:33 UTC

carbondata git commit: [CARBONDATA-1527] [CARBONDATA-1528] [PreAgg] Restrict alter/update/delete for pre-aggregate table

Repository: carbondata
Updated Branches:
  refs/heads/pre-aggregate 74f3c35a0 -> df41cc66e


[CARBONDATA-1527] [CARBONDATA-1528] [PreAgg] Restrict alter/update/delete for pre-aggregate table

This closes #1476


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/df41cc66
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/df41cc66
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/df41cc66

Branch: refs/heads/pre-aggregate
Commit: df41cc66e9074c3ba1a33a2a40b21df2a7bab7b9
Parents: 74f3c35
Author: kunal642 <ku...@gmail.com>
Authored: Mon Oct 23 18:28:15 2017 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Mon Nov 13 17:29:12 2017 +0530

----------------------------------------------------------------------
 .../core/metadata/schema/table/CarbonTable.java |   4 +
 .../schema/table/RelationIdentifier.java        |  12 ++
 .../preaggregate/TestPreAggregateLoad.scala     |   3 +-
 .../iud/DeleteCarbonTableTestCase.scala         |  15 ++
 .../iud/UpdateCarbonTableTestCase.scala         |  15 ++
 .../testsuite/sortcolumns/TestSortColumns.scala |   3 +-
 .../carbondata/events/AlterTableEvents.scala    |  12 +-
 .../org/apache/carbondata/events/Events.scala   |   7 +-
 .../management/DeleteLoadByIdCommand.scala      |   2 +-
 .../DeleteLoadByLoadDateCommand.scala           |   6 +-
 .../CreatePreAggregateTableCommand.scala        |   2 +
 .../preaaggregate/PreAggregateListeners.scala   | 188 ++++++++++++++++++-
 .../schema/CarbonAlterTableRenameCommand.scala  |   2 +-
 .../spark/sql/hive/CarbonSessionState.scala     |  13 +-
 .../AlterTableValidationTestCase.scala          |  21 ++-
 .../vectorreader/ChangeDataTypeTestCases.scala  |  17 ++
 .../vectorreader/DropColumnTestCases.scala      |  16 ++
 .../apache/spark/util/CarbonCommandSuite.scala  |  18 +-
 18 files changed, 339 insertions(+), 17 deletions(-)
----------------------------------------------------------------------
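
At a glance, the restriction this patch introduces behaves as in the sketch below (Scala, mirroring the new test cases added further down; `spark` is assumed to be a CarbonData-enabled SparkSession and the table/column names are only illustrative):

  import scala.util.Try

  // Parent table plus a pre-aggregate table on top of it, the same setup
  // the new test cases in this patch use.
  spark.sql("create table preaggMain (a string, b string, c string) stored by 'carbondata'")
  spark.sql("create table preagg1 stored by 'carbondata' " +
    "tblproperties('parent'='PreAggMain') as select a, sum(b) from PreAggMain group by a")

  // After this patch both statements are rejected by the new pre-event
  // listeners instead of silently leaving the pre-aggregate table stale.
  val update = Try(spark.sql("update preaggMain set (a)=('a')").show())
  val delete = Try(spark.sql("delete from preagg1 where preaggmain_a = 'abc'").show())
  assert(update.isFailure && delete.isFailure)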


http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index 4a6fb8b..ca0952d 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -707,4 +707,8 @@ public class CarbonTable implements Serializable {
         .getParentRelationIdentifiers().isEmpty();
   }
 
+  public boolean hasPreAggregateTables() {
+    return tableInfo.getDataMapSchemaList() != null && !tableInfo
+        .getDataMapSchemaList().isEmpty();
+  }
 }
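
The helper added above is what the new pre-listeners in PreAggregateListeners.scala (further down in this diff) consult before allowing an operation. The guard they apply looks roughly like this (Scala sketch; `carbonTable` is assumed to be the already-resolved CarbonTable and the message text is abbreviated):

  // Block the operation both on a parent table that has pre-aggregate
  // children and on the pre-aggregate (child) table itself.
  if (carbonTable.hasPreAggregateTables) {
    throw new UnsupportedOperationException(
      "Operation is not supported for tables which have a pre-aggregate table")
  }
  if (carbonTable.isPreAggregateTable) {
    throw new UnsupportedOperationException(
      "Operation is not supported for pre-aggregate table")
  }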

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/RelationIdentifier.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/RelationIdentifier.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/RelationIdentifier.java
index 2a2d937..c9c44bf 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/RelationIdentifier.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/RelationIdentifier.java
@@ -26,6 +26,18 @@ import java.io.Serializable;
  */
 public class RelationIdentifier implements Serializable, Writable {
 
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  public void setTableId(String tableId) {
+    this.tableId = tableId;
+  }
+
   private String databaseName;
 
   private String tableName;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggregateLoad.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggregateLoad.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggregateLoad.scala
index 0c65577..1f576c5 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggregateLoad.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggregateLoad.scala
@@ -19,8 +19,9 @@ package org.apache.carbondata.integration.spark.testsuite.preaggregate
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
+import org.scalatest.{BeforeAndAfterAll, Ignore}
 
+@Ignore
 class TestPreAggregateLoad extends QueryTest with BeforeAndAfterAll {
   
   val testData = s"$resourcesPath/sample.csv"

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
index 3c2842c..a2bd6aa 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
@@ -129,6 +129,21 @@ class DeleteCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS default.carbon2")
   }
 
+  test("test if delete is unsupported for pre-aggregate tables") {
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preagg1")
+    sql("create table preaggMain (a string, b string, c string) stored by 'carbondata'")
+    sql("create table preagg1 stored BY 'carbondata' tblproperties('parent'='PreAggMain') as select a,sum(b) from PreAggMain group by a")
+    intercept[RuntimeException] {
+      sql("delete from preaggmain where a = 'abc'").show()
+    }.getMessage.contains("Delete operation is not supported for tables")
+    intercept[RuntimeException] {
+      sql("delete from preagg1 where preaggmain_a = 'abc'").show()
+    }.getMessage.contains("Delete operation is not supported for pre-aggregate table")
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preagg1")
+  }
+
 
   override def afterAll {
     sql("use default")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
index db289d9..4c43ec0 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
@@ -513,6 +513,21 @@ class UpdateCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS iud.rand")
   }
 
+  test("test if update is unsupported for pre-aggregate tables") {
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preagg1")
+    sql("create table preaggMain (a string, b string, c string) stored by 'carbondata'")
+    sql("create table preagg1 stored BY 'carbondata' tblproperties('parent'='PreAggMain') as select a,sum(b) from PreAggMain group by a")
+    intercept[RuntimeException] {
+      sql("update preaggmain set (a)=('a')").show
+    }.getMessage.contains("Update operation is not supported for tables")
+    intercept[RuntimeException] {
+      sql("update preagg1 set (a)=('a')").show
+    }.getMessage.contains("Update operation is not supported for pre-aggregate table")
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preagg1")
+  }
+
   override def afterAll {
     sql("use default")
     sql("drop database  if exists iud cascade")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
index b5fd8a9..6c5aa55 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
@@ -35,7 +35,7 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
     sql("CREATE TABLE tableOne(id int, name string, city string, age int) STORED BY 'org.apache.carbondata.format'")
     sql("CREATE TABLE tableTwo(id int, age int) STORED BY 'org.apache.carbondata.format'")
     sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/measureinsertintotest.csv' into table tableOne")
-    sql("insert into table tableTwo select id, count(age) from tableOne group by id")
+
   }
 
   test("create table sort columns dictionary include - int") {
@@ -335,6 +335,7 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
   }
 
   test("Test tableTwo data") {
+    sql("insert into table tableTwo select id, count(age) from tableOne group by id")
     checkAnswer(
       sql("select id,age from tableTwo order by id"),
       Seq(Row(1, 1), Row(2, 1), Row(3, 2), Row(4, 2)))

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark-common/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala
index ec79acc..2f7cf63 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala
@@ -18,7 +18,7 @@ package org.apache.carbondata.events
 
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.execution.command.{AlterTableDropColumnModel, AlterTableRenameModel}
+import org.apache.spark.sql.execution.command.{AlterTableDataTypeChangeModel, AlterTableDropColumnModel, AlterTableRenameModel}
 
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
 import org.apache.carbondata.processing.loading.model.CarbonLoadModel
@@ -35,6 +35,16 @@ case class AlterTableDropColumnPreEvent(carbonTable: CarbonTable,
 
 
 /**
+ * Event fired before an alter table data type change so that listeners can
+ * validate the request and abort the operation if it is not allowed
+ * @param carbonTable table whose column data type is being changed
+ * @param alterTableDataTypeChangeModel model describing the requested change
+ */
+case class AlterTableDataTypeChangePreEvent(carbonTable: CarbonTable,
+        alterTableDataTypeChangeModel: AlterTableDataTypeChangeModel)
+  extends Event with AlterTableDataTypeChangeEventInfo
+
+/**
  *
  * @param carbonTable
  * @param alterTableDropColumnModel

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark-common/src/main/scala/org/apache/carbondata/events/Events.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/Events.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/events/Events.scala
index 4f8d57e..9796dea 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/events/Events.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/events/Events.scala
@@ -18,7 +18,7 @@
 package org.apache.carbondata.events
 
 import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.execution.command.{AlterTableDropColumnModel, AlterTableRenameModel}
+import org.apache.spark.sql.execution.command.{AlterTableAddColumnsModel, AlterTableDataTypeChangeModel, AlterTableDropColumnModel, AlterTableRenameModel}
 
 import org.apache.carbondata.core.metadata.CarbonTableIdentifier
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
@@ -69,6 +69,11 @@ trait AlterTableDropColumnEventInfo {
   val alterTableDropColumnModel: AlterTableDropColumnModel
 }
 
+trait AlterTableDataTypeChangeEventInfo {
+  val carbonTable: CarbonTable
+  val alterTableDataTypeChangeModel: AlterTableDataTypeChangeModel
+}
+
 /**
  * event for alter_table_rename
  */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByIdCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByIdCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByIdCommand.scala
index 9ea4018..6a0465c 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByIdCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByIdCommand.scala
@@ -57,7 +57,7 @@ case class DeleteLoadByIdCommand(
       DeleteSegmentByIdPostEvent(carbonTable,
         loadIds,
         sparkSession)
-    OperationListenerBus.getInstance.fireEvent(deleteSegmentByIdPreEvent, operationContext)
+    OperationListenerBus.getInstance.fireEvent(deleteSegmentPostEvent, operationContext)
     Seq.empty
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByLoadDateCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByLoadDateCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByLoadDateCommand.scala
index 58d8236..83f41bb 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByLoadDateCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/DeleteLoadByLoadDateCommand.scala
@@ -51,14 +51,12 @@ case class DeleteLoadByLoadDateCommand(
       loadDate,
       GetDB.getDatabaseName(databaseNameOp, sparkSession),
       tableName,
-      carbonTable
-    )
-
+      carbonTable)
     val deleteSegmentPostEvent: DeleteSegmentByDatePostEvent =
       DeleteSegmentByDatePostEvent(carbonTable,
         loadDate,
         sparkSession)
-    OperationListenerBus.getInstance.fireEvent(deleteSegmentByDatePreEvent, operationContext)
+    OperationListenerBus.getInstance.fireEvent(deleteSegmentPostEvent, operationContext)
 
     Seq.empty
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
index b952285..e12cbb9 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/CreatePreAggregateTableCommand.scala
@@ -137,3 +137,5 @@ case class CreatePreAggregateTableCommand(
     Seq.empty
   }
 }
+
+

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
index b507856..2ce97fe 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateListeners.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.execution.command.CarbonDropTableCommand
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.metadata.schema.table.DataMapSchema
-import org.apache.carbondata.events.{DropTablePostEvent, Event, LoadTablePostExecutionEvent, OperationContext, OperationEventListener}
+import org.apache.carbondata.events._
 
 object DropPreAggregateTablePostListener extends OperationEventListener {
 
@@ -79,3 +79,189 @@ object LoadPostAggregateListener extends OperationEventListener {
     }
   }
 }
+
+object PreAggregateDataTypeChangePreListener extends OperationEventListener {
+  /**
+   * Called on a specified event occurrence
+   *
+   * @param event
+   * @param operationContext
+   */
+  override def onEvent(event: Event, operationContext: OperationContext): Unit = {
+    val dataTypeChangePreListener = event.asInstanceOf[AlterTableDataTypeChangePreEvent]
+    val carbonTable = dataTypeChangePreListener.carbonTable
+    val alterTableDataTypeChangeModel = dataTypeChangePreListener.alterTableDataTypeChangeModel
+    val columnToBeAltered: String = alterTableDataTypeChangeModel.columnName
+    val dataMapSchemas = carbonTable.getTableInfo.getDataMapSchemaList
+    if (dataMapSchemas != null && !dataMapSchemas.isEmpty) {
+      dataMapSchemas.asScala.foreach { dataMapSchema =>
+          val childColumns = dataMapSchema.getChildSchema.getListOfColumns
+          if (childColumns.asScala.exists(_.getColumnName.equalsIgnoreCase(columnToBeAltered))) {
+            throw new UnsupportedOperationException(s"Column $columnToBeAltered exists in a " +
+                                                    s"pre-aggregate table ${ dataMapSchema.toString
+                                                    }. Cannot change datatype")
+          }
+      }
+
+      if (carbonTable.isPreAggregateTable) {
+        throw new UnsupportedOperationException(s"Cannot change data type for columns in " +
+                                                s"pre-aggregate table ${
+                                                  carbonTable.getDatabaseName
+                                                }.${ carbonTable.getFactTableName }")
+      }
+    }
+  }
+}
+
+object PreAggregateDeleteSegmentByDatePreListener extends OperationEventListener {
+  /**
+   * Called on a specified event occurrence
+   *
+   * @param event
+   * @param operationContext
+   */
+  override def onEvent(event: Event, operationContext: OperationContext): Unit = {
+    val deleteSegmentByDatePreEvent = event.asInstanceOf[DeleteSegmentByDatePreEvent]
+    val carbonTable = deleteSegmentByDatePreEvent.carbonTable
+    if (carbonTable != null) {
+      if (carbonTable.hasPreAggregateTables) {
+        throw new UnsupportedOperationException(
+          "Delete segment operation is not supported on tables which have a pre-aggregate table. " +
+          "Drop pre-aggregate tables to continue")
+      }
+      if (carbonTable.isPreAggregateTable) {
+        throw new UnsupportedOperationException(
+          "Delete segment operation is not supported on pre-aggregate table")
+      }
+    }
+  }
+}
+
+object PreAggregateDeleteSegmentByIdPreListener extends OperationEventListener {
+  /**
+   * Called on a specified event occurrence
+   *
+   * @param event
+   * @param operationContext
+   */
+  override def onEvent(event: Event, operationContext: OperationContext): Unit = {
+    val tableEvent = event.asInstanceOf[DeleteSegmentByIdPreEvent]
+    val carbonTable = tableEvent.carbonTable
+    if (carbonTable != null) {
+      if (carbonTable.hasPreAggregateTables) {
+        throw new UnsupportedOperationException(
+          "Delete segment operation is not supported on tables which have a pre-aggregate table")
+      }
+      if (carbonTable.isPreAggregateTable) {
+        throw new UnsupportedOperationException(
+          "Delete segment operation is not supported on pre-aggregate table")
+      }
+    }
+  }
+
+}
+
+object PreAggregateDropColumnPreListener extends OperationEventListener {
+  /**
+   * Called on a specified event occurrence
+   *
+   * @param event
+   * @param operationContext
+   */
+  override def onEvent(event: Event, operationContext: OperationContext): Unit = {
+    val dataTypeChangePreListener = event.asInstanceOf[AlterTableDropColumnPreEvent]
+    val carbonTable = dataTypeChangePreListener.carbonTable
+    val alterTableDropColumnModel = dataTypeChangePreListener.alterTableDropColumnModel
+    val columnsToBeDropped = alterTableDropColumnModel.columns
+    val dataMapSchemas = carbonTable.getTableInfo.getDataMapSchemaList
+    if (dataMapSchemas != null && !dataMapSchemas.isEmpty) {
+      dataMapSchemas.asScala.foreach { dataMapSchema =>
+          val parentColumnNames = dataMapSchema.getChildSchema.getListOfColumns.asScala
+            .flatMap(_.getParentColumnTableRelations.asScala.map(_.getColumnName))
+          val columnExistsInChild = parentColumnNames.collectFirst {
+            case parentColumnName if columnsToBeDropped.contains(parentColumnName) =>
+              parentColumnName
+          }
+          if (columnExistsInChild.isDefined) {
+            throw new UnsupportedOperationException(
+              s"Column ${ columnExistsInChild.head } cannot be dropped because it exists in a " +
+              s"pre-aggregate table ${ dataMapSchema.getRelationIdentifier.toString}")
+          }
+      }
+      if (carbonTable.isPreAggregateTable) {
+        throw new UnsupportedOperationException(s"Cannot drop columns in pre-aggregate table ${
+          carbonTable.getDatabaseName}.${ carbonTable.getFactTableName }")
+      }
+    }
+  }
+}
+
+object PreAggregateRenameTablePreListener extends OperationEventListener {
+  /**
+   * Called on a specified event occurrence
+   *
+   * @param event
+   * @param operationContext
+   */
+  override def onEvent(event: Event,
+      operationContext: OperationContext): Unit = {
+    val renameTablePostListener = event.asInstanceOf[AlterTableRenamePreEvent]
+    val carbonTable = renameTablePostListener.carbonTable
+    if (carbonTable.isPreAggregateTable) {
+      throw new UnsupportedOperationException(
+        "Rename operation for pre-aggregate table is not supported.")
+    }
+    if (carbonTable.hasPreAggregateTables) {
+      throw new UnsupportedOperationException(
+        "Rename operation is not supported for table with pre-aggregate tables")
+    }
+  }
+}
+
+object UpdatePreAggregatePreListener extends OperationEventListener {
+  /**
+   * Called on a specified event occurrence
+   *
+   * @param event
+   * @param operationContext
+   */
+  override def onEvent(event: Event, operationContext: OperationContext): Unit = {
+    val tableEvent = event.asInstanceOf[UpdateTablePreEvent]
+    val carbonTable = tableEvent.carbonTable
+    if (carbonTable != null) {
+      if (carbonTable.hasPreAggregateTables) {
+        throw new UnsupportedOperationException(
+          "Update operation is not supported for tables which have a pre-aggregate table. Drop " +
+          "pre-aggregate tables to continue.")
+      }
+      if (carbonTable.isPreAggregateTable) {
+        throw new UnsupportedOperationException(
+          "Update operation is not supported for pre-aggregate table")
+      }
+    }
+  }
+}
+
+object DeletePreAggregatePreListener extends OperationEventListener {
+  /**
+   * Called on a specified event occurrence
+   *
+   * @param event
+   * @param operationContext
+   */
+  override def onEvent(event: Event, operationContext: OperationContext): Unit = {
+    val tableEvent = event.asInstanceOf[DeleteFromTablePreEvent]
+    val carbonTable = tableEvent.carbonTable
+    if (carbonTable != null) {
+      if (carbonTable.hasPreAggregateTables) {
+        throw new UnsupportedOperationException(
+          "Delete operation is not supported for tables which have a pre-aggregate table. Drop " +
+          "pre-aggregate tables to continue.")
+      }
+      if (carbonTable.isPreAggregateTable) {
+        throw new UnsupportedOperationException(
+          "Delete operation is not supported for pre-aggregate table")
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
index e0617d6..b96baff 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
@@ -136,7 +136,7 @@ private[sql] case class CarbonAlterTableRenameCommand(
         alterTableRenameModel,
         newTablePath,
         sparkSession)
-      OperationListenerBus.getInstance().fireEvent(alterTableRenamePreEvent, operationContext)
+      OperationListenerBus.getInstance().fireEvent(alterTableRenamePostEvent, operationContext)
 
       sparkSession.catalog.refreshTable(TableIdentifier(newTableName,
         Some(oldDatabaseName)).quotedString)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala
index d17dd11..f698dd4 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionState.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.optimizer.Optimizer
 import org.apache.spark.sql.catalyst.parser.ParserInterface
 import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, SubqueryAlias}
 import org.apache.spark.sql.execution.SparkOptimizer
-import org.apache.spark.sql.execution.command.preaaggregate.{DropPreAggregateTablePostListener, LoadPostAggregateListener}
+import org.apache.spark.sql.execution.command.preaaggregate._
 import org.apache.spark.sql.execution.datasources._
 import org.apache.spark.sql.execution.strategy.{CarbonLateDecodeStrategy, DDLStrategy, StreamingTableStrategy}
 import org.apache.spark.sql.internal.SQLConf
@@ -35,7 +35,7 @@ import org.apache.spark.sql.parser.CarbonSparkSqlParser
 
 import org.apache.carbondata.core.datamap.DataMapStoreManager
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
-import org.apache.carbondata.events.{DropTablePostEvent, LoadTablePostExecutionEvent, OperationListenerBus}
+import org.apache.carbondata.events._
 
 /**
  * This class will have carbon catalog and refresh the relation from cache if the carbontable in
@@ -132,8 +132,15 @@ object CarbonSessionState {
     OperationListenerBus.getInstance()
       .addListener(classOf[DropTablePostEvent], DropPreAggregateTablePostListener)
       .addListener(classOf[LoadTablePostExecutionEvent], LoadPostAggregateListener)
+      .addListener(classOf[DeleteSegmentByIdPreEvent], PreAggregateDeleteSegmentByIdPreListener)
+      .addListener(classOf[DeleteSegmentByDatePreEvent], PreAggregateDeleteSegmentByDatePreListener)
+      .addListener(classOf[UpdateTablePreEvent], UpdatePreAggregatePreListener)
+      .addListener(classOf[DeleteFromTablePreEvent], DeletePreAggregatePreListener)
+      .addListener(classOf[DeleteFromTablePreEvent], DeletePreAggregatePreListener)
+      .addListener(classOf[AlterTableDropColumnPreEvent], PreAggregateDropColumnPreListener)
+      .addListener(classOf[AlterTableRenamePreEvent], PreAggregateRenameTablePreListener)
+      .addListener(classOf[AlterTableDataTypeChangePreEvent], PreAggregateDataTypeChangePreListener)
   }
-
 }
 
 /**
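
With the listeners registered above, any command that fires the matching event picks up the new validation. A minimal sketch of the firing side, following the fireEvent pattern already used by DeleteLoadByIdCommand in this patch (`carbonTable` and `alterTableDataTypeChangeModel` are assumed to be in scope, and OperationContext is assumed here to be constructible with no arguments):

  import org.apache.carbondata.events.{AlterTableDataTypeChangePreEvent, OperationContext, OperationListenerBus}

  val operationContext = new OperationContext
  // Event case class exactly as declared in AlterTableEvents.scala above.
  val dataTypeChangePreEvent =
    AlterTableDataTypeChangePreEvent(carbonTable, alterTableDataTypeChangeModel)
  // PreAggregateDataTypeChangePreListener (registered above) throws
  // UnsupportedOperationException when the column is used by a pre-aggregate table.
  OperationListenerBus.getInstance().fireEvent(dataTypeChangePreEvent, operationContext)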

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
index 6f618fe..0274605 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
@@ -21,8 +21,11 @@ import java.io.File
 import java.math.{BigDecimal, RoundingMode}
 import java.sql.Timestamp
 
-import org.apache.spark.sql.Row
+import org.apache.spark.sql.{CarbonEnv, Row}
 import org.apache.spark.sql.common.util.Spark2QueryTest
+import org.apache.spark.sql.execution.command.preaaggregate.PreAggregateUtil
+import org.apache.spark.sql.test.Spark2TestQueryExecutor
+import org.junit.Assert
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
@@ -487,6 +490,22 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
     checkExistence(sql("describe formatted specifiedSortColumnsWithAlter"),true,"empno,empname,role,doj")
   }
 
+  test("test to check if new parent table name is reflected in pre-aggregate tables") {
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preaggmain_new")
+    sql("drop table if exists preagg1")
+    sql("create table preaggMain (a string, b string, c string) stored by 'carbondata'")
+    sql(
+      "create table preagg1 stored BY 'carbondata' tblproperties('parent'='PreAggMain') as select" +
+      " a,sum(b) from PreAggMain group by a")
+    intercept[RuntimeException] {
+      sql("alter table preagg1 rename to preagg2")
+    }.getMessage.contains("Rename operation for pre-aggregate table is not supported.")
+    intercept[RuntimeException] {
+      sql("alter table preaggmain rename to preaggmain_new")
+    }.getMessage.contains("Rename operation is not supported for table with pre-aggregate tables")
+  }
+
   override def afterAll {
     sql("DROP TABLE IF EXISTS restructure")
     sql("drop table if exists table1")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
index 9f104ed..67ea21e 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
@@ -147,6 +147,23 @@ class ChangeDataTypeTestCases extends Spark2QueryTest with BeforeAndAfterAll {
     test_change_int_to_long()
   }
 
+  test("test data type change with pre-aggregate table should throw exception") {
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preagg1")
+    sql("create table preaggMain (a string, b string, c string) stored by 'carbondata'")
+    sql(
+      "create table preagg1 stored BY 'carbondata' tblproperties('parent'='PreAggMain') as select" +
+      " a,sum(b) from PreAggMain group by a")
+    intercept[RuntimeException] {
+      sql("alter table preaggmain drop columns(a)").show
+    }.getMessage.contains("exists in pre-aggregate table")
+    intercept[RuntimeException] {
+      sql("alter table preagg1 drop columns(a)").show
+    }.getMessage.contains("cannot be dropped")
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preagg1")
+  }
+
   override def afterAll {
     sql("DROP TABLE IF EXISTS changedatatypetest")
     sql("DROP TABLE IF EXISTS hivetable")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
index 00e4a14..1a1d5d5 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
@@ -98,6 +98,22 @@ class DropColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
     test_drop_and_compaction()
   }
 
+  test("test dropping of column in pre-aggregate should throw exception") {
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preagg1")
+    sql("create table preaggMain (a string, b string, c string) stored by 'carbondata'")
+    sql(
+      "create table preagg1 stored BY 'carbondata' tblproperties('parent'='PreAggMain') as select" +
+      " a,sum(b) from PreAggMain group by a")
+    sql("alter table preaggmain drop columns(c)").show
+    checkExistence(sql("desc table preaggmain"), false, "c")
+    intercept[RuntimeException] {
+      sql("alter table preaggmain drop columns(a)").show
+    }.getMessage.contains("cannot be dropped")
+    sql("drop table if exists preaggMain")
+    sql("drop table if exists preagg1")
+  }
+
   override def afterAll {
     sql("DROP TABLE IF EXISTS dropcolumntest")
     sql("DROP TABLE IF EXISTS hivetable")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df41cc66/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala b/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
index decb861..c65bcc4 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
@@ -22,12 +22,11 @@ import java.sql.Timestamp
 import java.util.Date
 
 import org.apache.spark.sql.common.util.Spark2QueryTest
-import org.apache.spark.sql.test.TestQueryExecutor
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.api.CarbonStore
 import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
+import org.apache.carbondata.core.util.CarbonProperties
 
 class CarbonCommandSuite extends Spark2QueryTest with BeforeAndAfterAll {
 
@@ -142,6 +141,21 @@ class CarbonCommandSuite extends Spark2QueryTest with BeforeAndAfterAll {
     dropTable(table)
   }
 
+  test("test if delete segments by id is unsupported for pre-aggregate tables") {
+    dropTable("preaggMain")
+    dropTable("preagg1")
+    sql("create table preaggMain (a string, b string, c string) stored by 'carbondata'")
+    sql("create table preagg1 stored BY 'carbondata' tblproperties('parent'='PreAggMain') as select a,sum(b) from PreAggMain group by a")
+    intercept[UnsupportedOperationException] {
+      sql("delete from table preaggMain where segment.id in (1,2)")
+    }.getMessage.contains("Delete segment operation is not supported on tables")
+    intercept[UnsupportedOperationException] {
+      sql("delete from table preagg1 where segment.id in (1,2)")
+    }.getMessage.contains("Delete segment operation is not supported on pre-aggregate tables")
+    dropTable("preaggMain")
+    dropTable("preagg1")
+  }
+
   protected def dropTable(tableName: String): Unit ={
     sql(s"DROP TABLE IF EXISTS $tableName")
   }