Posted to issues@carbondata.apache.org by GitBox <gi...@apache.org> on 2021/02/02 11:30:34 UTC

[GitHub] [carbondata] ajantha-bhat commented on a change in pull request #4086: [CARBONDATA-4115] Successful load and insert will return segment ID

ajantha-bhat commented on a change in pull request #4086:
URL: https://github.com/apache/carbondata/pull/4086#discussion_r568528970



##########
File path: integration/spark/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
##########
@@ -82,6 +83,43 @@ class CarbonCommandSuite extends QueryTest with BeforeAndAfterAll {
        """.stripMargin)
   }
 
+  protected def createTestTable(tableName: String): Unit = {
+    sql(
+      s"""

Review comment:
       Instead of adding a new testcase, just add a validation for the segment ID to one of the existing testcases for **loading, insert into, partition table loading, partition table insert into** (see the sketch below).
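    For example, something like this in an existing insert testcase (the table names and the expected segment ID "0" are illustrative; `sql`, `checkAnswer`, and `Row` are already in scope in the suite):

    ```scala
    // Hypothetical addition to an existing load/insert testcase: assert that the
    // command itself returns the segment ID of the newly created load.
    checkAnswer(
      sql("INSERT INTO TABLE carbon_table SELECT * FROM source_table"),
      Seq(Row("0")) // first load of a fresh table is expected to be segment 0
    )
    ```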
   

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
##########
@@ -276,7 +280,15 @@ case class CarbonInsertIntoCommand(databaseNameOp: Option[String],
         }
         throw ex
     }
-    Seq.empty
+    if(loadResultForReturn!=null && loadResultForReturn.getLoadName!=null) {
+      Seq(Row(loadResultForReturn.getLoadName))
+    } else {
+      rowsForReturn

Review comment:
       Why are you returning the number of rows instead of the segment ID here? When will the code enter this branch when the load is successful?
   Can you add a comment explaining it? (See the sketch below.)
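    A rough sketch of the kind of comment I mean, assuming the else branch is reached only in flows (for example, partition inserts) where no load result is produced; the comments here are illustrative, not a statement of what the code actually does:

    ```scala
    // Sketch only: the branch structure is taken from the diff above, the
    // comments and formatting are illustrative.
    if (loadResultForReturn != null && loadResultForReturn.getLoadName != null) {
      // Non-partition load succeeded: return the segment ID of the new load.
      Seq(Row(loadResultForReturn.getLoadName))
    } else {
      // Assumption: when no load result is available (e.g. partition table
      // insert), fall back to the rows collected earlier and explain why here.
      rowsForReturn
    }
    ```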

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
##########
@@ -276,7 +280,15 @@ case class CarbonInsertIntoCommand(databaseNameOp: Option[String],
         }
         throw ex
     }
-    Seq.empty
+    if(loadResultForReturn!=null && loadResultForReturn.getLoadName!=null) {

Review comment:
       I think this code is not formatted; we follow the convention of a space after `if` and spaces around `!=` (see the sketch below).
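    For reference, the same condition with that spacing applied (a style illustration only):

    ```scala
    // space after `if`, spaces around `!=`
    if (loadResultForReturn != null && loadResultForReturn.getLoadName != null) {
      Seq(Row(loadResultForReturn.getLoadName))
    }
    ```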

##########
File path: integration/spark/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
##########
@@ -100,6 +138,56 @@ class CarbonCommandSuite extends QueryTest with BeforeAndAfterAll {
   private lazy val location = CarbonProperties.getStorePath()
 
 
+  test("Return segment ID after load and insert") {
+    val tableName = "test_table"
+    val inputTableName = "csv_table"
+    val inputPath = s"$resourcesPath/data_alltypes.csv"
+    dropTable(tableName)
+    dropTable(inputTableName)
+    createAndLoadInputTable(inputTableName, inputPath)
+    createTestTable(tableName)
+    checkAnswer(sql(
+      s"""
+         | INSERT INTO TABLE $tableName
+         | SELECT shortField, intField, bigintField, doubleField, stringField,
+         | from_unixtime(unix_timestamp(timestampField,'yyyy/M/dd')) timestampField, decimalField,
+         | cast(to_date(from_unixtime(unix_timestamp(dateField,'yyyy/M/dd'))) as date), charField
+         | FROM $inputTableName
+       """.stripMargin), Seq(Row("0")))
+    checkAnswer(sql(
+      s"LOAD DATA LOCAL INPATH '$inputPath'" +
+      s" INTO TABLE $tableName" +
+      " OPTIONS('FILEHEADER'=" +
+      "'shortField,intField,bigintField,doubleField,stringField," +
+      "timestampField,decimalField,dateField,charField')"), Seq(Row("1")))

Review comment:
       Is it possible to return text like "Successfully loaded to segment id : 1" instead of returning just "1"? (See the sketch below.)
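    A minimal sketch of what I mean in the command's return path (the message text is illustrative, and the assertions on `Seq(Row("1"))` above would then need to match the full string):

    ```scala
    // Illustrative only: wrap the segment ID in a descriptive message rather
    // than returning the bare ID.
    if (loadResultForReturn != null && loadResultForReturn.getLoadName != null) {
      Seq(Row(s"Successfully loaded to segment id : ${loadResultForReturn.getLoadName}"))
    } else {
      rowsForReturn
    }
    ```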

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
##########
@@ -191,7 +196,15 @@ case class CarbonLoadDataCommand(databaseNameOp: Option[String],
           throw ex
         }
     }
-    Seq.empty
+    if(loadResultForReturn!=null && loadResultForReturn.getLoadName!=null) {

Review comment:
       @QiangCai, @ydvpankaj99: why is our checkstyle not catching these formatting issues?

##########
File path: integration/spark/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
##########
@@ -100,6 +138,56 @@ class CarbonCommandSuite extends QueryTest with BeforeAndAfterAll {
   private lazy val location = CarbonProperties.getStorePath()
 
 
+  test("Return segment ID after load and insert") {
+    val tableName = "test_table"
+    val inputTableName = "csv_table"
+    val inputPath = s"$resourcesPath/data_alltypes.csv"
+    dropTable(tableName)
+    dropTable(inputTableName)
+    createAndLoadInputTable(inputTableName, inputPath)
+    createTestTable(tableName)
+    checkAnswer(sql(
+      s"""
+         | INSERT INTO TABLE $tableName
+         | SELECT shortField, intField, bigintField, doubleField, stringField,
+         | from_unixtime(unix_timestamp(timestampField,'yyyy/M/dd')) timestampField, decimalField,
+         | cast(to_date(from_unixtime(unix_timestamp(dateField,'yyyy/M/dd'))) as date), charField
+         | FROM $inputTableName
+       """.stripMargin), Seq(Row("0")))
+    checkAnswer(sql(
+      s"LOAD DATA LOCAL INPATH '$inputPath'" +
+      s" INTO TABLE $tableName" +
+      " OPTIONS('FILEHEADER'=" +
+      "'shortField,intField,bigintField,doubleField,stringField," +
+      "timestampField,decimalField,dateField,charField')"), Seq(Row("1")))
+
+  }
+
+  test("Return segment ID after load and insert when auto merge enabled") {

Review comment:
       Same comment as above.

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
##########
@@ -276,7 +280,15 @@ case class CarbonInsertIntoCommand(databaseNameOp: Option[String],
         }
         throw ex
     }
-    Seq.empty
+    if(loadResultForReturn!=null && loadResultForReturn.getLoadName!=null) {
+      Seq(Row(loadResultForReturn.getLoadName))
+    } else {
+      rowsForReturn

Review comment:
       In the partition table case, what will `rowsForReturn` be?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org