Posted to commits@carbondata.apache.org by li...@apache.org on 2020/07/28 01:28:13 UTC

[carbondata] branch master updated: [HOTFIX] Show Segment with stage returns empty

This is an automated email from the ASF dual-hosted git repository.

liuzhi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 9557ba7  [HOTFIX] Show Segment with stage returns empty
9557ba7 is described below

commit 9557ba70472e49ac246ed2549d0ddb8a3e9d95f2
Author: haomarch <ma...@126.com>
AuthorDate: Mon Jul 27 15:35:14 2020 +0800

    [HOTFIX] Show Segment with stage returns empty
    
    Why is this PR needed?
    The listStageFiles function has a bug that causes stage file listing to fail,
    so SHOW SEGMENTS ... WITH STAGE returns an empty result.
    
    What changes were proposed in this PR?
    The file-existence check in listStageFiles has been corrected: stage file
    names are now compared against the list of file names (allFileNames) instead
    of the list of CarbonFile objects, so the success and loading tag files are
    matched correctly.
    
    Does this PR introduce any user interface change?
    No
    
    Is any new testcase added?
    Yes
    
    This closes #3864
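
The diff below shows the actual fix in CarbonStore.scala. As a quick illustration of why the old check never matched, here is a minimal, self-contained Scala sketch; it does not use CarbonData's real CarbonFile or CarbonTablePath API, and FileStub, ListStageFilesSketch, and the ".success" suffix are hypothetical stand-ins chosen only for the example.

    // A minimal sketch of the bug fixed in CarbonStore.listStageFiles.
    // FileStub is a hypothetical stand-in for CarbonFile; only getName matters here.
    case class FileStub(getName: String)

    object ListStageFilesSketch {
      def main(args: Array[String]): Unit = {
        val success = ".success" // stand-in for CarbonTablePath.SUCCESS_FILE_SUFFIX
        val allFiles = List(FileStub("stage_1"), FileStub("stage_1" + success))

        // Buggy check: a String is searched for in a List[FileStub], so
        // contains() can never match and every stage file is filtered out.
        val buggy = allFiles
          .filterNot(_.getName.endsWith(success))
          .filter(f => allFiles.contains(f.getName + success))
        println(buggy.size)   // prints 0: the stage file is lost

        // Fixed check, mirroring the patch's allFileNames = allFiles.map(_.getName):
        // names are compared against names, so the success tag file is found.
        val allFileNames = allFiles.map(_.getName)
        val fixed = allFiles
          .filterNot(_.getName.endsWith(success))
          .filter(f => allFileNames.contains(f.getName + success))
        println(fixed.size)   // prints 1: the stage file is reported
      }
    }

This also explains the new asserts in the tests: before the fix, rows.length equalled unloadedStageCount only because both were zero, so the tests now additionally assert that the counts are greater than zero.
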
---
 .../scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala  | 3 +++
 .../src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala  | 5 ++++-
 .../src/main/scala/org/apache/carbondata/api/CarbonStore.scala     | 7 ++++---
 3 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
index 5321d05..fabf844 100644
--- a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
+++ b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
@@ -83,6 +83,7 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
       // 1. Test "SHOW SEGMENT ON $tableanme WITH STAGE"
       var rows = sql(s"SHOW SEGMENTS ON $tableName WITH STAGE").collect()
       var unloadedStageCount = CarbonStore.listStageFiles(tableStagePath)._1.length
+      assert(unloadedStageCount > 0)
       assert(rows.length == unloadedStageCount)
       for (index <- 0 until unloadedStageCount) {
         assert(rows(index).getString(0) == null)
@@ -98,6 +99,7 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
 
       // 2. Test "SHOW SEGMENT FOR TABLE $tableanme"
       val rowsfortable = sql(s"SHOW SEGMENTS FOR TABLE $tableName WITH STAGE").collect()
+      assert(rowsfortable.length > 0)
       assert(rowsfortable.length == rows.length)
       for (index <- 0 until unloadedStageCount) {
         assert(rows(index).toString() == rowsfortable(index).toString())
@@ -106,6 +108,7 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
       // 3. Test "SHOW SEGMENT ON $tableanme WITH STAGE AS (QUERY)"
       rows = sql(s"SHOW SEGMENTS ON $tableName WITH STAGE AS " +
         s"(SELECT * FROM $tableName" + "_segments)").collect()
+      assert(rows.length > 0)
       for (index <- 0 until unloadedStageCount) {
         val row = rows(index)
         assert(rows(index).getString(0) == null)
diff --git a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
index 74c5d94..235421f 100644
--- a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
+++ b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
@@ -258,6 +258,7 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
       // 1. Test "SHOW SEGMENT ON $tableanme WITH STAGE"
       var rows = sql(s"SHOW SEGMENTS ON $tableName WITH STAGE").collect()
       var unloadedStageCount = CarbonStore.listStageFiles(stagePath)._1.length
+      assert(unloadedStageCount > 0)
       assert(rows.length == unloadedStageCount)
       for (index <- 0 until unloadedStageCount) {
         assert(rows(index).getString(0) == null)
@@ -273,6 +274,7 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
 
       // 2. Test "SHOW SEGMENT FOR TABLE $tableanme"
       val rowsfortable = sql(s"SHOW SEGMENTS FOR TABLE $tableName WITH STAGE").collect()
+      assert(rowsfortable.length > 0)
       assert(rowsfortable.length == rows.length)
       for (index <- 0 until unloadedStageCount) {
         assert(rows(index).toString() == rowsfortable(index).toString())
@@ -281,7 +283,8 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
       // 3. Test "SHOW SEGMENT ON $tableanme WITH STAGE AS (QUERY)"
       rows = sql(s"SHOW SEGMENTS ON $tableName WITH STAGE AS " +
         s"(SELECT * FROM $tableName" + "_segments)").collect()
-      for (index <- 0 until unloadedStageCount) {
+      assert(rows.length > 0)
+      for (index <- 0 until rows.length) {
         val row = rows(index)
         assert(rows(index).getString(0) == null)
         assert(rows(index).getString(1).equals("Unload"))
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala b/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
index 7de86e9..6f259b0 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
@@ -125,6 +125,7 @@ object CarbonStore {
     if (dir.exists()) {
       // 1. List all files in the stage dictionary.
       val allFiles = dir.listFiles()
+      val allFileNames = allFiles.map(file => file.getName)
 
       // 2. Get StageFile list.
       // Firstly, get the stage files in the stage dictionary.
@@ -135,17 +136,17 @@ object CarbonStore {
       }.filterNot { file =>
         file.getName.endsWith(CarbonTablePath.LOADING_FILE_SUFFIX)
       }.filter { file =>
-        allFiles.contains(file.getName + CarbonTablePath.SUCCESS_FILE_SUFFIX)
+        allFileNames.contains(file.getName + CarbonTablePath.SUCCESS_FILE_SUFFIX)
       }.sortWith {
         (file1, file2) => file1.getLastModifiedTime > file2.getLastModifiedTime
       }
       // 3. Get the unloaded stage files, which haven't loading tag.
       val unloadedFiles = stageFiles.filterNot { file =>
-        allFiles.contains(file.getName + CarbonTablePath.LOADING_FILE_SUFFIX)
+        allFileNames.contains(file.getName + CarbonTablePath.LOADING_FILE_SUFFIX)
       }
       // 4. Get the loading stage files, which have loading tag.
       val loadingFiles = stageFiles.filter { file =>
-        allFiles.contains(file.getName + CarbonTablePath.LOADING_FILE_SUFFIX)
+        allFileNames.contains(file.getName + CarbonTablePath.LOADING_FILE_SUFFIX)
       }
       (unloadedFiles, loadingFiles)
     } else {