You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ku...@apache.org on 2020/08/19 04:28:50 UTC

[carbondata] branch master updated: [CARBONDATA-3927]Remove unwanted fields from tupleID to make it short and to improve store size and performance

This is an automated email from the ASF dual-hosted git repository.

kunalkapoor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 0298273  [CARBONDATA-3927]Remove unwanted fields from tupleID to make it short and to improve store size and performance
0298273 is described below

commit 02982739ed6a472e172b9df13912bf15f3a00488
Author: akashrn5 <ak...@gmail.com>
AuthorDate: Sun Jul 12 21:23:23 2020 +0530

    [CARBONDATA-3927]Remove unwanted fields from tupleID to make it
    short and to improve store size and performance
    
    Why is this PR needed?
    Currently, we store the tupleId which is very long, which
    increases the store size and reduces query performance.
    
    What changes were proposed in this PR?
    Remove the compressor name, part id and batch number from the tupleID.
    This helps to reduce the store size and also improves query performance.
    
    part_0 is not required as it will be the same for all
    _batchno is not required as it is a common word and can be replaced by "_"
    compressor name is not required as it does not play any role.
    
    This closes #3837
---
 .../carbondata/core/mutate/CarbonUpdateUtil.java   | 39 +++++++++++---
 .../apache/carbondata/core/mutate/TupleIdEnum.java | 18 ++++---
 .../apache/carbondata/core/util/CarbonUtil.java    | 12 +++--
 .../carbondata/core/util/path/CarbonTablePath.java | 38 +++++++++++---
 .../carbondata/core/util/CarbonUtilTest.java       | 28 ++++++++++
 .../command/management/CommonLoadUtils.scala       |  7 ++-
 .../command/mutation/DeleteExecution.scala         | 60 ++++++++++++++++------
 .../testsuite/iud/DeleteCarbonTableTestCase.scala  | 26 +++++-----
 .../spark/carbondata/query/SubQueryTestSuite.scala |  2 +-
 9 files changed, 173 insertions(+), 57 deletions(-)

diff --git a/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java b/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
index ec7894d..f43a5dc 100644
--- a/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/mutate/CarbonUpdateUtil.java
@@ -84,7 +84,16 @@ public class CarbonUpdateUtil {
    */
   public static String getSegmentWithBlockFromTID(String Tid, boolean isPartitionTable) {
     if (isPartitionTable) {
-      return getRequiredFieldFromTID(Tid, TupleIdEnum.SEGMENT_ID);
+      return getRequiredFieldFromTID(Tid, TupleIdEnum.PARTITION_SEGMENT_ID);
+    }
+    // this case is to check for add segment case, as now the segment id is present at first index,
+    // in add segment case, it will be in second index as the blockletID is generated by adding the
+    // complete external path
+    // this is in case of the external segment, where the tuple id has external path with #
+    if (Tid.contains("#")) {
+      return getRequiredFieldFromTID(Tid, TupleIdEnum.EXTERNAL_SEGMENT_ID)
+          + CarbonCommonConstants.FILE_SEPARATOR + getRequiredFieldFromTID(Tid,
+          TupleIdEnum.EXTERNAL_BLOCK_ID);
     }
     return getRequiredFieldFromTID(Tid, TupleIdEnum.SEGMENT_ID)
         + CarbonCommonConstants.FILE_SEPARATOR + getRequiredFieldFromTID(Tid, TupleIdEnum.BLOCK_ID);
@@ -93,11 +102,17 @@ public class CarbonUpdateUtil {
   /**
    * Returns block path from tuple id
    */
-  public static String getTableBlockPath(String tid, String tablePath, boolean isStandardTable) {
-    String partField = getRequiredFieldFromTID(tid, TupleIdEnum.PART_ID);
+  public static String getTableBlockPath(String tid, String tablePath, boolean isStandardTable,
+      boolean isPartitionTable) {
+    String partField = "0";
     // If it has segment file then part field can be appended directly to table path
     if (!isStandardTable) {
-      return tablePath + CarbonCommonConstants.FILE_SEPARATOR + partField.replace("#", "/");
+      if (isPartitionTable) {
+        partField = getRequiredFieldFromTID(tid, TupleIdEnum.PARTITION_PART_ID);
+        return tablePath + CarbonCommonConstants.FILE_SEPARATOR + partField.replace("#", "/");
+      } else {
+        return tablePath;
+      }
     }
     String part = CarbonTablePath.addPartPrefix(partField);
     String segment =
@@ -941,13 +956,21 @@ public class CarbonUpdateUtil {
    */
   public static String getSegmentBlockNameKey(String segID, String blockName,
       boolean isPartitionTable) {
-    String blockNameWithOutPart = blockName
+    String blockNameWithOutPartAndBatchNo = blockName
         .substring(blockName.indexOf(CarbonCommonConstants.HYPHEN) + 1,
-            blockName.lastIndexOf(CarbonTablePath.getCarbonDataExtension()));
+            blockName.lastIndexOf(CarbonTablePath.getCarbonDataExtension()))
+        .replace(CarbonTablePath.BATCH_PREFIX, CarbonCommonConstants.UNDERSCORE);
+    // to remove compressor name
+    int index = blockNameWithOutPartAndBatchNo.lastIndexOf(CarbonCommonConstants.POINT);
+    if (index != -1) {
+      blockNameWithOutPartAndBatchNo = blockNameWithOutPartAndBatchNo
+          .replace(blockNameWithOutPartAndBatchNo.substring(index), "");
+    }
     if (isPartitionTable) {
-      return blockNameWithOutPart;
+      return blockNameWithOutPartAndBatchNo;
+    } else {
+      return segID + CarbonCommonConstants.FILE_SEPARATOR + blockNameWithOutPartAndBatchNo;
     }
-    return segID + CarbonCommonConstants.FILE_SEPARATOR + blockNameWithOutPart;
   }
 
   /**
diff --git a/core/src/main/java/org/apache/carbondata/core/mutate/TupleIdEnum.java b/core/src/main/java/org/apache/carbondata/core/mutate/TupleIdEnum.java
index 1c5d679..b1b9f22 100644
--- a/core/src/main/java/org/apache/carbondata/core/mutate/TupleIdEnum.java
+++ b/core/src/main/java/org/apache/carbondata/core/mutate/TupleIdEnum.java
@@ -21,12 +21,18 @@ package org.apache.carbondata.core.mutate;
  * Enum class for tupleID.
  */
 public enum TupleIdEnum {
-  PART_ID(0),
-  SEGMENT_ID(1),
-  BLOCK_ID(2),
-  BLOCKLET_ID(3),
-  PAGE_ID(4),
-  OFFSET(5);
+  PARTITION_PART_ID(0),
+  SEGMENT_ID(0),
+  EXTERNAL_SEGMENT_ID(1),
+  PARTITION_SEGMENT_ID(1),
+  BLOCK_ID(1),
+  EXTERNAL_BLOCK_ID(2),
+  BLOCKLET_ID(2),
+  PAGE_ID(3),
+  OFFSET(4),
+  EXTERNAL_BLOCKLET_ID(3),
+  EXTERNAL_PAGE_ID(4),
+  EXTERNAL_OFFSET(5);
 
   private int index;
 
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index f32c6c9..32be743 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2782,10 +2782,14 @@ public final class CarbonUtil {
         } else {
           // Replace / with # on partition director to support multi level partitioning. And access
           // them all as a single entity.
-          blockId =
-              partitionDir.replace(CarbonCommonConstants.FILE_SEPARATOR, "#")
-                  + CarbonCommonConstants.FILE_SEPARATOR + segmentId
-                  + CarbonCommonConstants.FILE_SEPARATOR + blockName;
+          if (partitionDir.isEmpty()) {
+            blockId = segmentId + CarbonCommonConstants.FILE_SEPARATOR + blockName;
+          } else {
+            blockId = partitionDir.replace(CarbonCommonConstants.FILE_SEPARATOR, "#")
+                + CarbonCommonConstants.FILE_SEPARATOR + segmentId
+                + CarbonCommonConstants.FILE_SEPARATOR + blockName;
+          }
+
         }
       }
     } else {
diff --git a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
index 56d407d..da7403b 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
@@ -44,7 +44,7 @@ public class CarbonTablePath {
   private static final String SEGMENT_PREFIX = "Segment_";
   private static final String PARTITION_PREFIX = "Part";
   private static final String DATA_PART_PREFIX = "part-";
-  private static final String BATCH_PREFIX = "_batchno";
+  public static final String BATCH_PREFIX = "_batchno";
   private static final String LOCK_DIR = "LockFiles";
 
   public static final String TABLE_STATUS_FILE = "tablestatus";
@@ -635,10 +635,23 @@ public class CarbonTablePath {
    * @return shortBlockId
    */
   public static String getShortBlockId(String blockId) {
-    return blockId.replace(PARTITION_PREFIX, "")
-            .replace(SEGMENT_PREFIX, "")
-            .replace(DATA_PART_PREFIX, "")
-            .replace(CARBON_DATA_EXT, "");
+    String blockIdWithCompressorName =
+        blockId.replace(PARTITION_PREFIX + "0" + CarbonCommonConstants.FILE_SEPARATOR, "")
+            .replace(SEGMENT_PREFIX, "").replace(BATCH_PREFIX, CarbonCommonConstants.UNDERSCORE)
+            .replace(DATA_PART_PREFIX, "").replace(CARBON_DATA_EXT, "");
+    // to remove compressor name
+    if (!blockId.equalsIgnoreCase(blockIdWithCompressorName)) {
+      int index = blockIdWithCompressorName.lastIndexOf(".");
+      if (index != -1) {
+        String replace =
+            blockIdWithCompressorName.replace(blockIdWithCompressorName.substring(index), "");
+        return replace;
+      } else {
+        return blockIdWithCompressorName;
+      }
+    } else {
+      return blockIdWithCompressorName;
+    }
   }
 
   /**
@@ -648,8 +661,19 @@ public class CarbonTablePath {
    * @return shortBlockId
    */
   public static String getShortBlockIdForPartitionTable(String blockId) {
-    return blockId.replace(DATA_PART_PREFIX, "")
-        .replace(CARBON_DATA_EXT, "");
+    String blockIdWithCompressorName = blockId.replace(DATA_PART_PREFIX, "")
+        .replace(BATCH_PREFIX, CarbonCommonConstants.UNDERSCORE).replace(CARBON_DATA_EXT, "");
+    // to remove compressor name
+    if (!blockId.equalsIgnoreCase(blockIdWithCompressorName)) {
+      int index = blockIdWithCompressorName.lastIndexOf(POINT);
+      if (index != -1) {
+        return blockIdWithCompressorName.replace(blockIdWithCompressorName.substring(index), "");
+      } else {
+        return blockIdWithCompressorName;
+      }
+    } else {
+      return blockIdWithCompressorName;
+    }
   }
 
   /**
diff --git a/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java b/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java
index 28a70ba..fd98673 100644
--- a/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java
@@ -43,7 +43,9 @@ import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.mutate.CarbonUpdateUtil;
 import org.apache.carbondata.core.scan.model.ProjectionDimension;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import mockit.Mock;
 import mockit.MockUp;
@@ -919,6 +921,32 @@ public class CarbonUtilTest {
     Assert.assertTrue(schemaString.length() > schema.length());
   }
 
+  @Test
+  public void testTupeIDInUpdateScenarios() {
+    // Test CarbonTablePath.java
+    String blockId = "Part0/Segment_0/part-0-0_batchno0-0-0-1597409791503.snappy.carbondata";
+    Assert.assertEquals(CarbonTablePath.getShortBlockId(blockId), "0/0-0_0-0-0-1597409791503");
+    blockId = "c3=aa/part-0-100100000100001_batchno0-0-0-1597411003332.snappy.carbondata";
+    Assert.assertEquals(CarbonTablePath.getShortBlockIdForPartitionTable(blockId), "c3=aa/0-100100000100001_0-0-0-1597411003332");
+    // external segment case
+    blockId = "#home#root1#Projects#carbondata#integration#spark#target#warehouse#addsegtest#/Segment_2/part-0-0_batchno0-0-1-1597411388431.snappy.carbondata";
+    Assert.assertEquals(CarbonTablePath.getShortBlockId(blockId), "#home#root1#Projects#carbondata#integration#spark#target#warehouse#addsegtest#/2/0-0_0-0-1-1597411388431");
+    // standard table case
+    String TID = "0/0-0_0-0-0-1597411901991/0/0/0";
+    Assert.assertEquals(CarbonUpdateUtil.getSegmentWithBlockFromTID(TID, false), "0/0-0_0-0-0-1597411901991");
+    // partition table
+    TID = "c3=aa/0-100100000100001_0-0-0-1597412090158/0/0/0";
+    Assert.assertEquals(CarbonUpdateUtil.getSegmentWithBlockFromTID(TID, true), "0-100100000100001_0-0-0-1597412090158");
+    // external segment case
+    TID = "#home#root1#Projects#carbondata#integration#spark#target#warehouse#addsegtest#/2/0-0_0-0-1-1597412329342/0/0/0";
+    Assert.assertEquals(CarbonUpdateUtil.getSegmentWithBlockFromTID(TID, false), "2/0-0_0-0-1-1597412329342");
+    String blockName = "part-0-0_batchno0-0-0-1597412488102.snappy.carbondata";
+    // non partition table
+    Assert.assertEquals(CarbonUpdateUtil.getSegmentBlockNameKey("0", blockName, false), "0/0-0_0-0-0-1597412488102");
+    // partition table
+    Assert.assertEquals(CarbonUpdateUtil.getSegmentBlockNameKey("0", blockName, true), "0-0_0-0-0-1597412488102");
+  }
+
   private String generateString(int length) {
     StringBuilder builder = new StringBuilder();
     for (int i = 0; i < length; i++) {
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala
index 15428fa..f574e12 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala
@@ -89,7 +89,12 @@ object CommonLoadUtils {
     import org.apache.spark.sql.functions.udf
     // extracting only segment from tupleId
     val getSegIdUDF = udf((tupleId: String) =>
-      CarbonUpdateUtil.getRequiredFieldFromTID(tupleId, TupleIdEnum.SEGMENT_ID))
+      // this is in case of the external segment, where the tuple id has external path with #
+      if (tupleId.contains("#")) {
+        CarbonUpdateUtil.getRequiredFieldFromTID(tupleId, TupleIdEnum.EXTERNAL_SEGMENT_ID)
+      } else {
+        CarbonUpdateUtil.getRequiredFieldFromTID(tupleId, TupleIdEnum.SEGMENT_ID)
+      })
     // getting all fields except tupleId field as it is not required in the value
     val otherFields = CarbonScalaUtil.getAllFieldsWithoutTupleIdField(fields)
     // extract tupleId field which will be used as a key
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
index 5dbc333..d079157 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
@@ -35,6 +35,7 @@ import org.apache.spark.sql.util.SparkSQLUtil
 
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.compression.CompressorFactory
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.index.Segment
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
@@ -190,7 +191,7 @@ object DeleteExecution {
                        isStandardTable,
                        metadataDetails
                          .find(_.getLoadName.equalsIgnoreCase(blockDetails.get(key)))
-                         .get, carbonTable.isHivePartitionTable)
+                         .get, carbonTable)
           }
           result
         }).collect()
@@ -201,20 +202,20 @@ object DeleteExecution {
         timestamp: String,
         rowCountDetailsVO: RowCountDetailsVO,
         isStandardTable: Boolean,
-        load: LoadMetadataDetails, isPartitionTable: Boolean
+        load: LoadMetadataDetails, carbonTable: CarbonTable
     ): Iterator[(SegmentStatus, (SegmentUpdateDetails, ExecutionErrors, Long))] = {
 
       val result = new DeleteDeltaResultImpl()
       var deleteStatus = SegmentStatus.LOAD_FAILURE
       val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)
       // here key = segment/blockName
-      val blockName = if (isPartitionTable) {
-        CarbonUpdateUtil.getBlockName(CarbonTablePath.addDataPartPrefix(key))
+      var blockName = if (carbonTable.isHivePartitionTable) {
+        key
       } else {
-        CarbonUpdateUtil
-          .getBlockName(
-            CarbonTablePath.addDataPartPrefix(key.split(CarbonCommonConstants.FILE_SEPARATOR)(1)))
+        key.split(CarbonCommonConstants.FILE_SEPARATOR)(1)
       }
+      blockName = blockName.replace(CarbonCommonConstants.UNDERSCORE, CarbonTablePath.BATCH_PREFIX)
+      blockName = CarbonUpdateUtil.getBlockName(CarbonTablePath.addDataPartPrefix(blockName))
       val deleteDeltaBlockDetails: DeleteDeltaBlockDetails = new DeleteDeltaBlockDetails(blockName)
       val resultIter =
         new Iterator[(SegmentStatus, (SegmentUpdateDetails, ExecutionErrors, Long))] {
@@ -226,13 +227,19 @@ object DeleteExecution {
             val oneRow = iter.next
             TID = oneRow
               .get(oneRow.fieldIndex(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID)).toString
-            val (offset, blockletId, pageId) = if (isPartitionTable) {
+            val (offset, blockletId, pageId) = if (carbonTable.isHivePartitionTable) {
               (CarbonUpdateUtil.getRequiredFieldFromTID(TID,
-                TupleIdEnum.OFFSET.getTupleIdIndex - 1),
+                TupleIdEnum.OFFSET.getTupleIdIndex),
                 CarbonUpdateUtil.getRequiredFieldFromTID(TID,
-                  TupleIdEnum.BLOCKLET_ID.getTupleIdIndex - 1),
+                  TupleIdEnum.BLOCKLET_ID.getTupleIdIndex),
+                Integer.parseInt(CarbonUpdateUtil.getRequiredFieldFromTID(TID,
+                  TupleIdEnum.PAGE_ID.getTupleIdIndex)))
+            } else if (TID.contains("#")) {
+              // this is in case of the external segment, where the tuple id has external path with#
+              (CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.EXTERNAL_OFFSET),
+                CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.EXTERNAL_BLOCKLET_ID),
                 Integer.parseInt(CarbonUpdateUtil.getRequiredFieldFromTID(TID,
-                  TupleIdEnum.PAGE_ID.getTupleIdIndex - 1)))
+                  TupleIdEnum.EXTERNAL_PAGE_ID)))
             } else {
               (CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.OFFSET),
                 CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.BLOCKLET_ID),
@@ -253,18 +260,39 @@ object DeleteExecution {
             if (StringUtils.isNotEmpty(load.getPath)) {
               load.getPath
             } else {
-              CarbonUpdateUtil.getTableBlockPath(TID, tablePath, isStandardTable)
+              CarbonUpdateUtil.getTableBlockPath(TID,
+                tablePath,
+                isStandardTable,
+                carbonTable.isHivePartitionTable)
             }
-          val completeBlockName = if (isPartitionTable) {
+
+          // get the compressor name
+          var columnCompressor: String = carbonTable.getTableInfo
+            .getFactTable
+            .getTableProperties
+            .get(CarbonCommonConstants.COMPRESSOR)
+          if (null == columnCompressor) {
+            columnCompressor = CompressorFactory.getInstance.getCompressor.getName
+          }
+          var blockNameFromTupleID =
+            if (TID.contains("#")) {
+              CarbonUpdateUtil.getRequiredFieldFromTID(TID,
+                TupleIdEnum.EXTERNAL_BLOCK_ID)
+            } else {
+              CarbonUpdateUtil.getRequiredFieldFromTID(TID,
+                TupleIdEnum.BLOCK_ID)
+            }
+          blockNameFromTupleID = blockNameFromTupleID.replace(CarbonCommonConstants.UNDERSCORE,
+            CarbonTablePath.BATCH_PREFIX)
+          val completeBlockName = if (carbonTable.isHivePartitionTable) {
             CarbonTablePath
               .addDataPartPrefix(
-                CarbonUpdateUtil.getRequiredFieldFromTID(TID,
-                  TupleIdEnum.BLOCK_ID.getTupleIdIndex - 1) +
+                blockNameFromTupleID + CarbonCommonConstants.POINT + columnCompressor +
                 CarbonCommonConstants.FACT_FILE_EXT)
           } else {
             CarbonTablePath
               .addDataPartPrefix(
-                CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.BLOCK_ID) +
+                blockNameFromTupleID + CarbonCommonConstants.POINT + columnCompressor +
                 CarbonCommonConstants.FACT_FILE_EXT)
           }
           val deleteDeltaPath = CarbonUpdateUtil
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
index 1619dfc..a503cef 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
@@ -257,29 +257,29 @@ class DeleteCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
 
     val dataframe_part = sql("select getTupleId() as tupleId from iud_db.dest_tuple_part").collect()
     val listOfTupleId_part = dataframe_part.map(df => df.get(0).toString).sorted
-    assert(listOfTupleId_part(0).startsWith("c3=aa/0-100100000100001_batchno0-0-0-") &&
+    assert(listOfTupleId_part(0).startsWith("c3=aa/0-100100000100001_0-0-0-") &&
            listOfTupleId_part(0).endsWith("/0/0/0"))
-    assert(listOfTupleId_part(1).startsWith("c3=bb/0-100100000100002_batchno0-0-0-") &&
+    assert(listOfTupleId_part(1).startsWith("c3=bb/0-100100000100002_0-0-0-") &&
            listOfTupleId_part(1).endsWith("/0/0/0"))
-    assert(listOfTupleId_part(2).startsWith("c3=cc/0-100100000100003_batchno0-0-0-") &&
+    assert(listOfTupleId_part(2).startsWith("c3=cc/0-100100000100003_0-0-0-") &&
            listOfTupleId_part(2).endsWith("/0/0/0"))
-    assert(listOfTupleId_part(3).startsWith("c3=dd/0-100100000100004_batchno0-0-0-") &&
+    assert(listOfTupleId_part(3).startsWith("c3=dd/0-100100000100004_0-0-0-") &&
            listOfTupleId_part(3).endsWith("/0/0/0"))
-    assert(listOfTupleId_part(4).startsWith("c3=ee/0-100100000100005_batchno0-0-0-") &&
+    assert(listOfTupleId_part(4).startsWith("c3=ee/0-100100000100005_0-0-0-") &&
            listOfTupleId_part(4).endsWith("/0/0/0"))
 
     val dataframe = sql("select getTupleId() as tupleId from iud_db.dest_tuple")
     val listOfTupleId = dataframe.collect().map(df => df.get(0).toString).sorted
     assert(
-      listOfTupleId(0).contains("0/0/0-0_batchno0-0-0-") && listOfTupleId(0).endsWith("/0/0/0"))
+      listOfTupleId(0).contains("0/0-0_0-0-0-") && listOfTupleId(0).endsWith("/0/0/0"))
     assert(
-      listOfTupleId(1).contains("0/0/0-0_batchno0-0-0-") && listOfTupleId(1).endsWith("/0/0/1"))
+      listOfTupleId(1).contains("0/0-0_0-0-0-") && listOfTupleId(1).endsWith("/0/0/1"))
     assert(
-      listOfTupleId(2).contains("0/0/0-0_batchno0-0-0-") && listOfTupleId(2).endsWith("/0/0/2"))
+      listOfTupleId(2).contains("0/0-0_0-0-0-") && listOfTupleId(2).endsWith("/0/0/2"))
     assert(
-      listOfTupleId(3).contains("0/0/0-0_batchno0-0-0-") && listOfTupleId(3).endsWith("/0/0/3"))
+      listOfTupleId(3).contains("0/0-0_0-0-0-") && listOfTupleId(3).endsWith("/0/0/3"))
     assert(
-      listOfTupleId(4).contains("0/0/0-0_batchno0-0-0-") && listOfTupleId(4).endsWith("/0/0/4"))
+      listOfTupleId(4).contains("0/0-0_0-0-0-") && listOfTupleId(4).endsWith("/0/0/4"))
 
     val carbonTable_part = CarbonEnv.getInstance(SparkTestQueryExecutor.spark).carbonMetaStore
       .lookupRelation(Option("iud_db"), "dest_tuple_part")(SparkTestQueryExecutor.spark)
@@ -306,16 +306,14 @@ class DeleteCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
       carbonTable.isTransactionalTable,
       CarbonUtil.isStandardCarbonTable(carbonTable))
     assert(blockId_part.startsWith("Part0/Segment_0/part-0-100100000100001_batchno0-0-0-"))
-    val segment = Segment.getSegment("0", carbonTable.getTablePath)
     val tableBlockPath = CarbonUpdateUtil
       .getTableBlockPath(listOfTupleId(0),
         carbonTable.getTablePath,
-        CarbonUtil.isStandardCarbonTable(carbonTable))
-    val segment_part = Segment.getSegment("0", carbonTable_part.getTablePath)
+        CarbonUtil.isStandardCarbonTable(carbonTable), true)
     val tableBl0ckPath_part = CarbonUpdateUtil
       .getTableBlockPath(listOfTupleId_part(0),
         carbonTable_part.getTablePath,
-        CarbonUtil.isStandardCarbonTable(carbonTable_part))
+        CarbonUtil.isStandardCarbonTable(carbonTable_part), true)
     assert(tableBl0ckPath_part.endsWith("iud_db.db/dest_tuple_part/c3=aa"))
     assert(tableBlockPath.endsWith("iud_db.db/dest_tuple/Fact/Part0/Segment_0"))
 
diff --git a/integration/spark/src/test/scala/org/apache/spark/carbondata/query/SubQueryTestSuite.scala b/integration/spark/src/test/scala/org/apache/spark/carbondata/query/SubQueryTestSuite.scala
index f2b2d8a..421e8e9 100644
--- a/integration/spark/src/test/scala/org/apache/spark/carbondata/query/SubQueryTestSuite.scala
+++ b/integration/spark/src/test/scala/org/apache/spark/carbondata/query/SubQueryTestSuite.scala
@@ -56,7 +56,7 @@ class SubQueryTestSuite extends QueryTest with BeforeAndAfterAll {
   }
 
   test("tupleId") {
-    checkExistence(sql("select getTupleId() as tupleId from subquery"), true, "0/0/0-0_batchno0-0-")
+    checkExistence(sql("select getTupleId() as tupleId from subquery"), true, "0/0-0_0-0-")
   }
 
   override def afterAll() {