You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@iotdb.apache.org by ja...@apache.org on 2021/11/04 11:50:29 UTC

[iotdb] branch new_vector updated (9109e1f -> 9e88c16)

This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a change to branch new_vector
in repository https://gitbox.apache.org/repos/asf/iotdb.git.


    from 9109e1f  replace IChunkWriter with ChunkWriterImpl (#4317)
     new 5c5bb20  change some
     new 33838ee  Merge branch 'new_vector' of https://github.com/apache/iotdb into new_vector
     new 9e88c16  support select only one series in vector and delete useless functions in IChunkWriter interface

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../iotdb/AlignedTimeseriesSessionExample.java     |  44 +++---
 .../db/engine/cache/TimeSeriesMetadataCache.java   | 171 ---------------------
 .../inner/utils/InnerSpaceCompactionUtils.java     |   3 +-
 .../engine/memtable/AlignedWritableMemChunk.java   |  19 ++-
 .../db/engine/memtable/IWritableMemChunk.java      |   1 -
 .../iotdb/db/engine/memtable/WritableMemChunk.java |  16 +-
 .../iotdb/db/query/reader/chunk/MemPageReader.java |  10 +-
 .../apache/iotdb/db/tools/TsFileRewriteTool.java   |  12 +-
 .../org/apache/iotdb/db/utils/FileLoaderUtils.java |  51 +++---
 .../java/org/apache/iotdb/db/utils/MergeUtils.java |  35 ++---
 .../utils/CompactionFileGeneratorUtils.java        |   9 +-
 .../tsfile/write/chunk/AlignedChunkWriterImpl.java |  48 ------
 .../tsfile/write/chunk/ChunkGroupWriterImpl.java   |  23 ++-
 .../iotdb/tsfile/write/chunk/ChunkWriterImpl.java  |  41 ++---
 .../iotdb/tsfile/write/chunk/IChunkWriter.java     |  45 ------
 .../write/record/datapoint/BooleanDataPoint.java   |   6 +-
 .../tsfile/write/record/datapoint/DataPoint.java   |   4 +-
 .../write/record/datapoint/DoubleDataPoint.java    |   6 +-
 .../write/record/datapoint/FloatDataPoint.java     |   6 +-
 .../write/record/datapoint/IntDataPoint.java       |   6 +-
 .../write/record/datapoint/LongDataPoint.java      |   6 +-
 .../write/record/datapoint/StringDataPoint.java    |   6 +-
 .../write/writer/RestorableTsFileIOWriter.java     |   6 +-
 23 files changed, 140 insertions(+), 434 deletions(-)

[iotdb] 01/03: change some

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch new_vector
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 5c5bb200fdd3724fafae1a7e0d54c6b16a722536
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Thu Nov 4 16:19:19 2021 +0800

    change some
---
 .../iotdb/AlignedTimeseriesSessionExample.java     | 40 +++++++++++-----------
 .../iotdb/db/query/reader/chunk/MemPageReader.java | 13 ++-----
 .../write/writer/RestorableTsFileIOWriter.java     |  6 ++--
 3 files changed, 25 insertions(+), 34 deletions(-)

diff --git a/example/session/src/main/java/org/apache/iotdb/AlignedTimeseriesSessionExample.java b/example/session/src/main/java/org/apache/iotdb/AlignedTimeseriesSessionExample.java
index c7a7fa7..3e95a55 100644
--- a/example/session/src/main/java/org/apache/iotdb/AlignedTimeseriesSessionExample.java
+++ b/example/session/src/main/java/org/apache/iotdb/AlignedTimeseriesSessionExample.java
@@ -41,7 +41,7 @@ import java.util.Map;
 public class AlignedTimeseriesSessionExample {
 
   private static Session session;
-  private static final String ROOT_SG1_D1_VECTOR1 = "root.sg_1.d1.vector";
+  private static final String ROOT_SG1_D1_VECTOR1 = "root.sg_1.d1";
   private static final String ROOT_SG1_D1_VECTOR2 = "root.sg_1.d1.vector2";
   private static final String ROOT_SG1_D1_VECTOR3 = "root.sg_1.d1.vector3";
   private static final String ROOT_SG2_D1_VECTOR4 = "root.sg_2.d1.vector4";
@@ -58,42 +58,42 @@ public class AlignedTimeseriesSessionExample {
     // set session fetchSize
     session.setFetchSize(10000);
 
-    createTemplate();
+    //    createTemplate();
     createAlignedTimeseries();
 
     insertAlignedRecord();
-    insertAlignedRecords();
-    insertAlignedRecordsOfOneDevices();
+    //    insertAlignedRecords();
+    //    insertAlignedRecordsOfOneDevices();
 
-    insertAlignedStringRecord();
-    insertAlignedStringRecords();
+    //    insertAlignedStringRecord();
+    //    insertAlignedStringRecords();
 
-    insertTabletWithAlignedTimeseriesMethod1();
-    insertTabletWithAlignedTimeseriesMethod2();
-    insertNullableTabletWithAlignedTimeseries();
-    insertTabletsWithAlignedTimeseries();
+    //    insertTabletWithAlignedTimeseriesMethod1();
+    //    insertTabletWithAlignedTimeseriesMethod2();
+    //    insertNullableTabletWithAlignedTimeseries();
+    //    insertTabletsWithAlignedTimeseries();
 
     selectTest();
-    selectWithValueFilterTest();
-    selectWithGroupByTest();
-    selectWithLastTest();
+    //    selectWithValueFilterTest();
+    //    selectWithGroupByTest();
+    //    selectWithLastTest();
 
-    selectWithAggregationTest();
+    //    selectWithAggregationTest();
 
-    selectWithAlignByDeviceTest();
+    //    selectWithAlignByDeviceTest();
 
     session.close();
   }
 
   private static void selectTest() throws StatementExecutionException, IoTDBConnectionException {
-    SessionDataSet dataSet = session.executeQueryStatement("select s1 from root.sg_1.d1.vector");
+    SessionDataSet dataSet = session.executeQueryStatement("select s1 from root.sg_1.d1");
     System.out.println(dataSet.getColumnNames());
     while (dataSet.hasNext()) {
       System.out.println(dataSet.next());
     }
 
     dataSet.closeOperationHandle();
-    dataSet = session.executeQueryStatement("select * from root.sg_1.d1.vector");
+    dataSet = session.executeQueryStatement("select * from root.sg_1.d1");
     System.out.println(dataSet.getColumnNames());
     while (dataSet.hasNext()) {
       System.out.println(dataSet.next());
@@ -218,7 +218,7 @@ public class AlignedTimeseriesSessionExample {
       encodings.add(TSEncoding.RLE);
     }
     session.createAlignedTimeseries(
-        ROOT_SG1_D1_VECTOR2,
+        ROOT_SG1_D1_VECTOR1,
         multiMeasurementComponents,
         dataTypes,
         encodings,
@@ -404,12 +404,12 @@ public class AlignedTimeseriesSessionExample {
     types.add(TSDataType.INT64);
     types.add(TSDataType.INT32);
 
-    for (long time = 0; time < 1; time++) {
+    for (long time = 0; time < 10; time++) {
       List<Object> values = new ArrayList<>();
       values.add(1L);
       values.add(2);
       session.insertAlignedRecord(
-          ROOT_SG2_D1_VECTOR4, time, multiMeasurementComponents, types, values);
+          ROOT_SG1_D1_VECTOR1, time, multiMeasurementComponents, types, values);
     }
   }
 
diff --git a/server/src/main/java/org/apache/iotdb/db/query/reader/chunk/MemPageReader.java b/server/src/main/java/org/apache/iotdb/db/query/reader/chunk/MemPageReader.java
index 0070f94..fa3d256 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/reader/chunk/MemPageReader.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/reader/chunk/MemPageReader.java
@@ -18,7 +18,7 @@
  */
 package org.apache.iotdb.db.query.reader.chunk;
 
-import org.apache.iotdb.tsfile.file.metadata.AlignedChunkMetadata;
+import java.io.IOException;
 import org.apache.iotdb.tsfile.file.metadata.IChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
@@ -30,8 +30,6 @@ import org.apache.iotdb.tsfile.read.filter.operator.AndFilter;
 import org.apache.iotdb.tsfile.read.reader.IPageReader;
 import org.apache.iotdb.tsfile.read.reader.IPointReader;
 
-import java.io.IOException;
-
 public class MemPageReader implements IPageReader {
 
   private final IPointReader timeValuePairIterator;
@@ -47,14 +45,7 @@ public class MemPageReader implements IPageReader {
 
   @Override
   public BatchData getAllSatisfiedPageData(boolean ascending) throws IOException {
-    TSDataType dataType;
-    if (chunkMetadata instanceof AlignedChunkMetadata
-        && ((AlignedChunkMetadata) chunkMetadata).getValueChunkMetadataList().size() == 1) {
-      dataType =
-          ((AlignedChunkMetadata) chunkMetadata).getValueChunkMetadataList().get(0).getDataType();
-    } else {
-      dataType = chunkMetadata.getDataType();
-    }
+    TSDataType  dataType = chunkMetadata.getDataType();
     BatchData batchData = BatchDataFactory.createBatchData(dataType, ascending, false);
     while (timeValuePairIterator.hasNextTimeValuePair()) {
       TimeValuePair timeValuePair = timeValuePairIterator.nextTimeValuePair();
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriter.java
index f56c94d..2ea48fa 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriter.java
@@ -68,7 +68,7 @@ public class RestorableTsFileIOWriter extends TsFileIOWriter {
   private long maxPlanIndex = Long.MIN_VALUE;
 
   /** all chunk group metadata which have been serialized on disk. */
-  private Map<String, Map<String, List<ChunkMetadata>>> metadatasForQuery = new HashMap<>();
+  private final Map<String, Map<String, List<ChunkMetadata>>> metadatasForQuery = new HashMap<>();
 
   /**
    * @param file a given tsfile path you want to (continue to) write
@@ -235,7 +235,7 @@ public class RestorableTsFileIOWriter extends TsFileIOWriter {
         List<ChunkMetadata> rowMetaDataList = chunkGroupMetadata.getChunkMetadataList();
 
         String device = chunkGroupMetadata.getDevice();
-        for (IChunkMetadata chunkMetaData : rowMetaDataList) {
+        for (ChunkMetadata chunkMetaData : rowMetaDataList) {
           String measurementId = chunkMetaData.getMeasurementUid();
           if (!metadatasForQuery.containsKey(device)) {
             metadatasForQuery.put(device, new HashMap<>());
@@ -243,7 +243,7 @@ public class RestorableTsFileIOWriter extends TsFileIOWriter {
           if (!metadatasForQuery.get(device).containsKey(measurementId)) {
             metadatasForQuery.get(device).put(measurementId, new ArrayList<>());
           }
-          metadatasForQuery.get(device).get(measurementId).add((ChunkMetadata) chunkMetaData);
+          metadatasForQuery.get(device).get(measurementId).add(chunkMetaData);
         }
       }
     }

[iotdb] 02/03: Merge branch 'new_vector' of https://github.com/apache/iotdb into new_vector

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch new_vector
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 33838ee3a096c0a6361ed91e311eab0270fd8d75
Merge: 5c5bb20 9109e1f
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Thu Nov 4 19:31:01 2021 +0800

    Merge branch 'new_vector' of https://github.com/apache/iotdb into new_vector

 .../iotdb/AlignedTimeseriesSessionExample.java     | 55 +++++-----------
 .../iotdb/HybridTimeseriesSessionExample.java      |  9 +--
 .../inplace/manage/CrossSpaceMergeResource.java    |  5 +-
 .../cross/inplace/task/MergeMultiChunkTask.java    | 12 ++--
 .../db/utils/datastructure/AlignedTVList.java      | 76 +++++++++++++---------
 .../iotdb/db/utils/datastructure/TVList.java       |  6 +-
 .../db/utils/datastructure/VectorTVListTest.java   | 10 +--
 .../java/org/apache/iotdb/session/Session.java     | 34 ++++------
 .../org/apache/iotdb/session/pool/SessionPool.java | 20 ++++++
 .../session/IoTDBSessionVectorAggregationIT.java   |  9 +--
 .../iotdb/session/IoTDBSessionVectorInsertIT.java  |  9 +--
 .../apache/iotdb/tsfile/write/record/Tablet.java   |  4 ++
 12 files changed, 128 insertions(+), 121 deletions(-)


[iotdb] 03/03: support select only one series in vector and delete useless functions in IChunkWriter interface

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch new_vector
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 9e88c16b20205976db08aaacd72e33a5995d4d97
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Thu Nov 4 19:49:26 2021 +0800

    support select only one series in vector and delete useless functions in IChunkWriter interface
---
 .../iotdb/AlignedTimeseriesSessionExample.java     |   4 +-
 .../db/engine/cache/TimeSeriesMetadataCache.java   | 171 ---------------------
 .../inner/utils/InnerSpaceCompactionUtils.java     |   3 +-
 .../engine/memtable/AlignedWritableMemChunk.java   |  19 ++-
 .../db/engine/memtable/IWritableMemChunk.java      |   1 -
 .../iotdb/db/engine/memtable/WritableMemChunk.java |  16 +-
 .../iotdb/db/query/reader/chunk/MemPageReader.java |   5 +-
 .../apache/iotdb/db/tools/TsFileRewriteTool.java   |  12 +-
 .../org/apache/iotdb/db/utils/FileLoaderUtils.java |  51 +++---
 .../java/org/apache/iotdb/db/utils/MergeUtils.java |  35 ++---
 .../utils/CompactionFileGeneratorUtils.java        |   9 +-
 .../tsfile/write/chunk/AlignedChunkWriterImpl.java |  48 ------
 .../tsfile/write/chunk/ChunkGroupWriterImpl.java   |  23 ++-
 .../iotdb/tsfile/write/chunk/ChunkWriterImpl.java  |  41 ++---
 .../iotdb/tsfile/write/chunk/IChunkWriter.java     |  45 ------
 .../write/record/datapoint/BooleanDataPoint.java   |   6 +-
 .../tsfile/write/record/datapoint/DataPoint.java   |   4 +-
 .../write/record/datapoint/DoubleDataPoint.java    |   6 +-
 .../write/record/datapoint/FloatDataPoint.java     |   6 +-
 .../write/record/datapoint/IntDataPoint.java       |   6 +-
 .../write/record/datapoint/LongDataPoint.java      |   6 +-
 .../write/record/datapoint/StringDataPoint.java    |   6 +-
 22 files changed, 119 insertions(+), 404 deletions(-)

diff --git a/example/session/src/main/java/org/apache/iotdb/AlignedTimeseriesSessionExample.java b/example/session/src/main/java/org/apache/iotdb/AlignedTimeseriesSessionExample.java
index e4909de..e21cc64 100644
--- a/example/session/src/main/java/org/apache/iotdb/AlignedTimeseriesSessionExample.java
+++ b/example/session/src/main/java/org/apache/iotdb/AlignedTimeseriesSessionExample.java
@@ -72,8 +72,10 @@ public class AlignedTimeseriesSessionExample {
     //    insertTabletWithAlignedTimeseriesMethod2();
     //    insertNullableTabletWithAlignedTimeseries();
     //    insertTabletsWithAlignedTimeseries();
-
     selectTest();
+    session.executeNonQueryStatement("flush");
+    selectTest();
+
     //    selectWithValueFilterTest();
     //    selectWithGroupByTest();
     //    selectWithLastTest();
diff --git a/server/src/main/java/org/apache/iotdb/db/engine/cache/TimeSeriesMetadataCache.java b/server/src/main/java/org/apache/iotdb/db/engine/cache/TimeSeriesMetadataCache.java
index 94d834a..b9438a2 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/cache/TimeSeriesMetadataCache.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/cache/TimeSeriesMetadataCache.java
@@ -26,7 +26,6 @@ import org.apache.iotdb.db.query.control.FileReaderManager;
 import org.apache.iotdb.db.utils.TestOnly;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
 import org.apache.iotdb.tsfile.read.common.Path;
 import org.apache.iotdb.tsfile.utils.BloomFilter;
@@ -42,15 +41,11 @@ import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.lang.ref.WeakReference;
-import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.TreeSet;
 import java.util.WeakHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -182,172 +177,6 @@ public class TimeSeriesMetadataCache {
     }
   }
 
-  /**
-   * Support for vector. allSensors > subSensors
-   *
-   * @param key vector's own fullPath, e.g. root.sg1.d1.vector
-   * @param subSensorList all subSensors of this vector in query, e.g. [vector.s1, vector.s2,
-   *     vector.s3]
-   */
-  // Suppress synchronize warning
-  // Suppress high Cognitive Complexity warning
-  @SuppressWarnings({"squid:S1860", "squid:S3776"})
-  public List<TimeseriesMetadata> get(
-      TimeSeriesMetadataCacheKey key, List<String> subSensorList, boolean debug)
-      throws IOException {
-    Set<String> allSensors = new HashSet<>(subSensorList);
-    // add the time column
-    allSensors.add("");
-    if (!CACHE_ENABLE) {
-      // bloom filter part
-      TsFileSequenceReader reader = FileReaderManager.getInstance().get(key.filePath, true);
-      BloomFilter bloomFilter = reader.readBloomFilter();
-      if (bloomFilter != null
-          && !bloomFilter.contains(key.device + IoTDBConstant.PATH_SEPARATOR + key.measurement)) {
-        return Collections.emptyList();
-      }
-      return readTimeseriesMetadataForVector(reader, key, subSensorList, allSensors);
-    }
-
-    List<TimeseriesMetadata> res = new ArrayList<>();
-    getVectorTimeSeriesMetadataListFromCache(key, subSensorList, res);
-
-    if (res.isEmpty()) {
-      if (debug) {
-        DEBUG_LOGGER.info(
-            "Cache miss: {}.{} in file: {}", key.device, key.measurement, key.filePath);
-        DEBUG_LOGGER.info("Device: {}, all sensors: {}", key.device, allSensors);
-      }
-      // allow for the parallelism of different devices
-      synchronized (
-          devices.computeIfAbsent(key.device + SEPARATOR + key.filePath, WeakReference::new)) {
-        // double check
-        getVectorTimeSeriesMetadataListFromCache(key, subSensorList, res);
-        if (res.isEmpty()) {
-          Path path = new Path(key.device, key.measurement);
-          // bloom filter part
-          TsFileSequenceReader reader = FileReaderManager.getInstance().get(key.filePath, true);
-          BloomFilter bloomFilter = reader.readBloomFilter();
-          if (bloomFilter != null && !bloomFilter.contains(path.getFullPath())) {
-            if (debug) {
-              DEBUG_LOGGER.info("TimeSeries meta data {} is filter by bloomFilter!", key);
-            }
-            return Collections.emptyList();
-          }
-          res = readTimeseriesMetadataForVector(reader, key, subSensorList, allSensors);
-          Iterator<TimeseriesMetadata> iterator = res.iterator();
-          Set<String> subSensorSet = new HashSet<>(subSensorList);
-          subSensorSet.add(key.measurement);
-          // Note: allSensors > subSensors
-          // Put TimeSeriesMetadata of all sensors used in this query into cache
-          // Remove redundant TimeSeriesMetadata that do not belong to subSensors
-          while (iterator.hasNext()) {
-            TimeseriesMetadata metadata = iterator.next();
-            TimeSeriesMetadataCacheKey k =
-                new TimeSeriesMetadataCacheKey(
-                    key.filePath, key.device, metadata.getMeasurementId());
-            lruCache.put(k, metadata);
-            if (!subSensorSet.contains(metadata.getMeasurementId())) {
-              iterator.remove();
-            }
-          }
-        }
-      }
-    }
-    if (debug) {
-      if (res.isEmpty()) {
-        DEBUG_LOGGER.info("The file doesn't have this time series {}.", key);
-      } else {
-        DEBUG_LOGGER.info(
-            "Get timeseries: {}.{}  metadata in file: {}  from cache: {}.",
-            key.device,
-            key.measurement,
-            key.filePath,
-            res);
-      }
-    }
-
-    return res;
-  }
-
-  /**
-   * Support for vector, extraction of common function of `get`
-   *
-   * @param key vector's own fullPath, e.g. root.sg1.d1.vector
-   * @param subSensorList all subSensors of this vector in one query, e.g. [vector.s1, vector.s2,
-   *     vector.s3]
-   * @param allSensors all sensors of the device in one device, to vector, this should contain both
-   *     vector name and subSensors' name, e.g. [vector, vector.s1, vector.s2, vector.s3]
-   * @param reader TsFileSequenceReader created by file
-   */
-  private List<TimeseriesMetadata> readTimeseriesMetadataForVector(
-      TsFileSequenceReader reader,
-      TimeSeriesMetadataCacheKey key,
-      List<String> subSensorList,
-      Set<String> allSensors)
-      throws IOException {
-    Path path = new Path(key.device, key.measurement);
-    List<TimeseriesMetadata> timeSeriesMetadataList =
-        reader.readTimeseriesMetadata(path, allSensors);
-    // for new implementation of index tree, subSensor may not all stored in one leaf
-    // for this case, it's necessary to make sure all subSensor's timeseries add to list
-    TreeSet<String> subSensorsSet = new TreeSet<>(subSensorList);
-    for (int i = 0; i < timeSeriesMetadataList.size(); i++) {
-      TimeseriesMetadata tsMetadata = timeSeriesMetadataList.get(i);
-      if (tsMetadata.getTSDataType().equals(TSDataType.VECTOR)
-          && tsMetadata.getMeasurementId().equals(key.measurement)) {
-        for (int j = i + 1; j < timeSeriesMetadataList.size(); j++) {
-          tsMetadata = timeSeriesMetadataList.get(j);
-          if (!subSensorsSet.isEmpty() && subSensorsSet.contains(tsMetadata.getMeasurementId())) {
-            subSensorsSet.remove(tsMetadata.getMeasurementId());
-          }
-        }
-        break;
-      }
-    }
-    while (!subSensorsSet.isEmpty()) {
-      Path subPath = new Path(key.device, subSensorsSet.first());
-      List<TimeseriesMetadata> subTsMetaDataList =
-          reader.readTimeseriesMetadata(subPath, allSensors);
-      for (TimeseriesMetadata tsMetadata : subTsMetaDataList) {
-        if (!subSensorsSet.isEmpty() && subSensorsSet.contains(tsMetadata.getMeasurementId())) {
-          subSensorsSet.remove(tsMetadata.getMeasurementId());
-        }
-      }
-      timeSeriesMetadataList.addAll(subTsMetaDataList);
-    }
-    return timeSeriesMetadataList;
-  }
-
-  /**
-   * !!!Attention!!!
-   *
-   * <p>For a vector, e.g. root.sg1.d1.vector1(s1, s2) TimeSeriesMetadataCacheKey for vector1 should
-   * be {filePath: ""./data/data/seq/......., device: root.sg1.d1, measurement: vector1}, vector1
-   * will be in both device and measurement TimeSeriesMetadataCacheKey for vector1.s1 should be
-   * {filePath: ""./data/data/seq/......., device: root.sg1.d1, measurement: vector.s1}
-   * TimeSeriesMetadataCacheKey for vector1.s2 should be {filePath: ""./data/data/seq/.......,
-   * device: root.sg1.d1, measurement: vector.s2}
-   */
-  private void getVectorTimeSeriesMetadataListFromCache(
-      TimeSeriesMetadataCacheKey key, List<String> subSensorList, List<TimeseriesMetadata> res) {
-    TimeseriesMetadata timeseriesMetadata = lruCache.getIfPresent(key);
-    if (timeseriesMetadata != null) {
-      res.add(timeseriesMetadata);
-      for (String subSensor : subSensorList) {
-        timeseriesMetadata =
-            lruCache.getIfPresent(
-                new TimeSeriesMetadataCacheKey(key.filePath, key.device, subSensor));
-        if (timeseriesMetadata != null) {
-          res.add(timeseriesMetadata);
-        } else {
-          res.clear();
-          break;
-        }
-      }
-    }
-  }
-
   public double calculateTimeSeriesMetadataHitRatio() {
     return lruCache.stats().hitRate();
   }
diff --git a/server/src/main/java/org/apache/iotdb/db/engine/compaction/inner/utils/InnerSpaceCompactionUtils.java b/server/src/main/java/org/apache/iotdb/db/engine/compaction/inner/utils/InnerSpaceCompactionUtils.java
index 8032cfe..abffc09 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/compaction/inner/utils/InnerSpaceCompactionUtils.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/compaction/inner/utils/InnerSpaceCompactionUtils.java
@@ -46,7 +46,6 @@ import org.apache.iotdb.tsfile.read.reader.IPointReader;
 import org.apache.iotdb.tsfile.read.reader.chunk.ChunkReaderByTimestamp;
 import org.apache.iotdb.tsfile.utils.Pair;
 import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
 import org.apache.iotdb.tsfile.write.writer.RestorableTsFileIOWriter;
 
 import com.google.common.util.concurrent.RateLimiter;
@@ -202,7 +201,7 @@ public class InnerSpaceCompactionUtils {
     if (isChunkMetadataEmpty) {
       return;
     }
-    IChunkWriter chunkWriter;
+    ChunkWriterImpl chunkWriter;
     try {
       chunkWriter =
           new ChunkWriterImpl(
diff --git a/server/src/main/java/org/apache/iotdb/db/engine/memtable/AlignedWritableMemChunk.java b/server/src/main/java/org/apache/iotdb/db/engine/memtable/AlignedWritableMemChunk.java
index a04651e..1315a4c 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/memtable/AlignedWritableMemChunk.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/memtable/AlignedWritableMemChunk.java
@@ -225,7 +225,7 @@ public class AlignedWritableMemChunk implements IWritableMemChunk {
 
   @Override
   public void encode(IChunkWriter chunkWriter) {
-
+    AlignedChunkWriterImpl alignedChunkWriter = (AlignedChunkWriterImpl) chunkWriter;
     List<Integer> timeDuplicateAlignedRowIndexList = null;
     for (int sortedRowIndex = 0; sortedRowIndex < list.size(); sortedRowIndex++) {
       long time = list.getTime(sortedRowIndex);
@@ -253,24 +253,27 @@ public class AlignedWritableMemChunk implements IWritableMemChunk {
         boolean isNull = list.isValueMarked(originRowIndex, columnIndex);
         switch (dataTypes.get(columnIndex)) {
           case BOOLEAN:
-            chunkWriter.write(
+            alignedChunkWriter.write(
                 time, list.getBooleanByValueIndex(originRowIndex, columnIndex), isNull);
             break;
           case INT32:
-            chunkWriter.write(time, list.getIntByValueIndex(originRowIndex, columnIndex), isNull);
+            alignedChunkWriter.write(
+                time, list.getIntByValueIndex(originRowIndex, columnIndex), isNull);
             break;
           case INT64:
-            chunkWriter.write(time, list.getLongByValueIndex(originRowIndex, columnIndex), isNull);
+            alignedChunkWriter.write(
+                time, list.getLongByValueIndex(originRowIndex, columnIndex), isNull);
             break;
           case FLOAT:
-            chunkWriter.write(time, list.getFloatByValueIndex(originRowIndex, columnIndex), isNull);
+            alignedChunkWriter.write(
+                time, list.getFloatByValueIndex(originRowIndex, columnIndex), isNull);
             break;
           case DOUBLE:
-            chunkWriter.write(
+            alignedChunkWriter.write(
                 time, list.getDoubleByValueIndex(originRowIndex, columnIndex), isNull);
             break;
           case TEXT:
-            chunkWriter.write(
+            alignedChunkWriter.write(
                 time, list.getBinaryByValueIndex(originRowIndex, columnIndex), isNull);
             break;
           default:
@@ -280,7 +283,7 @@ public class AlignedWritableMemChunk implements IWritableMemChunk {
             break;
         }
       }
-      chunkWriter.write(time);
+      alignedChunkWriter.write(time);
       timeDuplicateAlignedRowIndexList = null;
     }
   }
diff --git a/server/src/main/java/org/apache/iotdb/db/engine/memtable/IWritableMemChunk.java b/server/src/main/java/org/apache/iotdb/db/engine/memtable/IWritableMemChunk.java
index 203f4e6..ee10ea1 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/memtable/IWritableMemChunk.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/memtable/IWritableMemChunk.java
@@ -102,7 +102,6 @@ public interface IWritableMemChunk {
    *
    * <p>the mechanism is just like copy on write
    *
-   * @param measurementList the measurementList to be queried
    * @return sorted tv list
    */
   TVList getSortedTvListForQuery(List<IMeasurementSchema> schemaList);
diff --git a/server/src/main/java/org/apache/iotdb/db/engine/memtable/WritableMemChunk.java b/server/src/main/java/org/apache/iotdb/db/engine/memtable/WritableMemChunk.java
index aeb1c66..99afe0e 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/memtable/WritableMemChunk.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/memtable/WritableMemChunk.java
@@ -302,6 +302,8 @@ public class WritableMemChunk implements IWritableMemChunk {
   @Override
   public void encode(IChunkWriter chunkWriter) {
 
+    ChunkWriterImpl chunkWriterImpl = (ChunkWriterImpl) chunkWriter;
+
     for (int sortedRowIndex = 0; sortedRowIndex < list.size(); sortedRowIndex++) {
       long time = list.getTime(sortedRowIndex);
 
@@ -312,27 +314,27 @@ public class WritableMemChunk implements IWritableMemChunk {
 
       // store last point for SDT
       if (sortedRowIndex + 1 == list.size()) {
-        ((ChunkWriterImpl) chunkWriter).setLastPoint(true);
+        ((ChunkWriterImpl) chunkWriterImpl).setLastPoint(true);
       }
 
       switch (schema.getType()) {
         case BOOLEAN:
-          chunkWriter.write(time, list.getBoolean(sortedRowIndex), false);
+          chunkWriterImpl.write(time, list.getBoolean(sortedRowIndex));
           break;
         case INT32:
-          chunkWriter.write(time, list.getInt(sortedRowIndex), false);
+          chunkWriterImpl.write(time, list.getInt(sortedRowIndex));
           break;
         case INT64:
-          chunkWriter.write(time, list.getLong(sortedRowIndex), false);
+          chunkWriterImpl.write(time, list.getLong(sortedRowIndex));
           break;
         case FLOAT:
-          chunkWriter.write(time, list.getFloat(sortedRowIndex), false);
+          chunkWriterImpl.write(time, list.getFloat(sortedRowIndex));
           break;
         case DOUBLE:
-          chunkWriter.write(time, list.getDouble(sortedRowIndex), false);
+          chunkWriterImpl.write(time, list.getDouble(sortedRowIndex));
           break;
         case TEXT:
-          chunkWriter.write(time, list.getBinary(sortedRowIndex), false);
+          chunkWriterImpl.write(time, list.getBinary(sortedRowIndex));
           break;
         default:
           LOGGER.error("WritableMemChunk does not support data type: {}", schema.getType());
diff --git a/server/src/main/java/org/apache/iotdb/db/query/reader/chunk/MemPageReader.java b/server/src/main/java/org/apache/iotdb/db/query/reader/chunk/MemPageReader.java
index fa3d256..ea64694 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/reader/chunk/MemPageReader.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/reader/chunk/MemPageReader.java
@@ -18,7 +18,6 @@
  */
 package org.apache.iotdb.db.query.reader.chunk;
 
-import java.io.IOException;
 import org.apache.iotdb.tsfile.file.metadata.IChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
@@ -30,6 +29,8 @@ import org.apache.iotdb.tsfile.read.filter.operator.AndFilter;
 import org.apache.iotdb.tsfile.read.reader.IPageReader;
 import org.apache.iotdb.tsfile.read.reader.IPointReader;
 
+import java.io.IOException;
+
 public class MemPageReader implements IPageReader {
 
   private final IPointReader timeValuePairIterator;
@@ -45,7 +46,7 @@ public class MemPageReader implements IPageReader {
 
   @Override
   public BatchData getAllSatisfiedPageData(boolean ascending) throws IOException {
-    TSDataType  dataType = chunkMetadata.getDataType();
+    TSDataType dataType = chunkMetadata.getDataType();
     BatchData batchData = BatchDataFactory.createBatchData(dataType, ascending, false);
     while (timeValuePairIterator.hasNextTimeValuePair()) {
       TimeValuePair timeValuePair = timeValuePairIterator.nextTimeValuePair();
diff --git a/server/src/main/java/org/apache/iotdb/db/tools/TsFileRewriteTool.java b/server/src/main/java/org/apache/iotdb/db/tools/TsFileRewriteTool.java
index b3183ab..8c61368 100644
--- a/server/src/main/java/org/apache/iotdb/db/tools/TsFileRewriteTool.java
+++ b/server/src/main/java/org/apache/iotdb/db/tools/TsFileRewriteTool.java
@@ -436,22 +436,22 @@ public class TsFileRewriteTool implements AutoCloseable {
       getOrDefaultTsFileIOWriter(oldTsFile, partitionId);
       switch (schema.getType()) {
         case INT32:
-          chunkWriter.write(time, (int) value, false);
+          chunkWriter.write(time, (int) value);
           break;
         case INT64:
-          chunkWriter.write(time, (long) value, false);
+          chunkWriter.write(time, (long) value);
           break;
         case FLOAT:
-          chunkWriter.write(time, (float) value, false);
+          chunkWriter.write(time, (float) value);
           break;
         case DOUBLE:
-          chunkWriter.write(time, (double) value, false);
+          chunkWriter.write(time, (double) value);
           break;
         case BOOLEAN:
-          chunkWriter.write(time, (boolean) value, false);
+          chunkWriter.write(time, (boolean) value);
           break;
         case TEXT:
-          chunkWriter.write(time, (Binary) value, false);
+          chunkWriter.write(time, (Binary) value);
           break;
         default:
           throw new UnSupportedDataTypeException(
diff --git a/server/src/main/java/org/apache/iotdb/db/utils/FileLoaderUtils.java b/server/src/main/java/org/apache/iotdb/db/utils/FileLoaderUtils.java
index e1e4594..85517fe 100644
--- a/server/src/main/java/org/apache/iotdb/db/utils/FileLoaderUtils.java
+++ b/server/src/main/java/org/apache/iotdb/db/utils/FileLoaderUtils.java
@@ -19,6 +19,7 @@
 package org.apache.iotdb.db.utils;
 
 import org.apache.iotdb.db.engine.cache.TimeSeriesMetadataCache;
+import org.apache.iotdb.db.engine.cache.TimeSeriesMetadataCache.TimeSeriesMetadataCacheKey;
 import org.apache.iotdb.db.engine.modification.Modification;
 import org.apache.iotdb.db.engine.storagegroup.TsFileResource;
 import org.apache.iotdb.db.metadata.path.AlignedPath;
@@ -47,6 +48,7 @@ import org.apache.iotdb.tsfile.read.reader.chunk.ChunkReader;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -165,31 +167,34 @@ public class FileLoaderUtils {
       // load all the TimeseriesMetadata of vector, the first one is for time column and the
       // remaining is for sub sensors
       // the order of timeSeriesMetadata list is same as subSensorList's order
-      List<TimeseriesMetadata> timeSeriesMetadata =
-          TimeSeriesMetadataCache.getInstance()
-              .get(
-                  new TimeSeriesMetadataCache.TimeSeriesMetadataCacheKey(
-                      resource.getTsFilePath(), vectorPath.getDevice(), ""),
-                  new ArrayList<>(vectorPath.getMeasurementList()),
-                  context.isDebug());
-
-      // assemble VectorTimeSeriesMetadata
-      if (timeSeriesMetadata != null && !timeSeriesMetadata.isEmpty()) {
-        timeSeriesMetadata
-            .get(0)
-            .setChunkMetadataLoader(
-                new DiskChunkMetadataLoader(resource, vectorPath, context, filter));
-        for (int i = 1; i < timeSeriesMetadata.size(); i++) {
-          PartialPath subPath = vectorPath.getPathWithMeasurement(i - 1);
-          timeSeriesMetadata
-              .get(i)
-              .setChunkMetadataLoader(
-                  new DiskChunkMetadataLoader(resource, subPath, context, filter));
+      TimeSeriesMetadataCache cache = TimeSeriesMetadataCache.getInstance();
+      List<String> valueMeasurementList = vectorPath.getMeasurementList();
+
+      Set<String> allSensors = new HashSet<>(valueMeasurementList);
+      allSensors.add("");
+      boolean isDebug = context.isDebug();
+      String filePath = resource.getTsFilePath();
+
+      String deviceId = vectorPath.getDevice();
+      TimeseriesMetadata timeColumn =
+          cache.get(new TimeSeriesMetadataCacheKey(filePath, deviceId, ""), allSensors, isDebug);
+      if (timeColumn != null) {
+        timeColumn.setChunkMetadataLoader(
+            new DiskChunkMetadataLoader(resource, vectorPath, context, filter));
+        List<TimeseriesMetadata> valueTimeSeriesMetadataList =
+            new ArrayList<>(valueMeasurementList.size());
+        for (String valueMeasurement : valueMeasurementList) {
+          TimeseriesMetadata valueColumn =
+              cache.get(
+                  new TimeSeriesMetadataCacheKey(filePath, deviceId, valueMeasurement),
+                  allSensors,
+                  isDebug);
+          valueColumn.setChunkMetadataLoader(
+              new DiskChunkMetadataLoader(resource, vectorPath, context, filter));
+          valueTimeSeriesMetadataList.add(valueColumn);
         }
         alignedTimeSeriesMetadata =
-            new AlignedTimeSeriesMetadata(
-                timeSeriesMetadata.get(0),
-                timeSeriesMetadata.subList(1, timeSeriesMetadata.size()));
+            new AlignedTimeSeriesMetadata(timeColumn, valueTimeSeriesMetadataList);
       }
     } else { // if the tsfile is unclosed, we just get it directly from TsFileResource
       alignedTimeSeriesMetadata = (AlignedTimeSeriesMetadata) resource.getTimeSeriesMetadata();
diff --git a/server/src/main/java/org/apache/iotdb/db/utils/MergeUtils.java b/server/src/main/java/org/apache/iotdb/db/utils/MergeUtils.java
index 343c03d..052c834 100644
--- a/server/src/main/java/org/apache/iotdb/db/utils/MergeUtils.java
+++ b/server/src/main/java/org/apache/iotdb/db/utils/MergeUtils.java
@@ -30,7 +30,7 @@ import org.apache.iotdb.tsfile.read.common.BatchData;
 import org.apache.iotdb.tsfile.read.common.Chunk;
 import org.apache.iotdb.tsfile.read.common.Path;
 import org.apache.iotdb.tsfile.read.reader.chunk.ChunkReader;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,28 +50,25 @@ public class MergeUtils {
     // util class
   }
 
-  public static void writeTVPair(TimeValuePair timeValuePair, IChunkWriter chunkWriter) {
+  public static void writeTVPair(TimeValuePair timeValuePair, ChunkWriterImpl chunkWriter) {
     switch (chunkWriter.getDataType()) {
       case TEXT:
-        chunkWriter.write(
-            timeValuePair.getTimestamp(), timeValuePair.getValue().getBinary(), false);
+        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getBinary());
         break;
       case DOUBLE:
-        chunkWriter.write(
-            timeValuePair.getTimestamp(), timeValuePair.getValue().getDouble(), false);
+        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getDouble());
         break;
       case BOOLEAN:
-        chunkWriter.write(
-            timeValuePair.getTimestamp(), timeValuePair.getValue().getBoolean(), false);
+        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getBoolean());
         break;
       case INT64:
-        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getLong(), false);
+        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getLong());
         break;
       case INT32:
-        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getInt(), false);
+        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getInt());
         break;
       case FLOAT:
-        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getFloat(), false);
+        chunkWriter.write(timeValuePair.getTimestamp(), timeValuePair.getValue().getFloat());
         break;
       default:
         throw new UnsupportedOperationException("Unknown data type " + chunkWriter.getDataType());
@@ -95,7 +92,7 @@ public class MergeUtils {
     return totalSize;
   }
 
-  public static int writeChunkWithoutUnseq(Chunk chunk, IChunkWriter chunkWriter)
+  public static int writeChunkWithoutUnseq(Chunk chunk, ChunkWriterImpl chunkWriter)
       throws IOException {
     ChunkReader chunkReader = new ChunkReader(chunk, null);
     int ptWritten = 0;
@@ -109,25 +106,25 @@ public class MergeUtils {
     return ptWritten;
   }
 
-  public static void writeBatchPoint(BatchData batchData, int i, IChunkWriter chunkWriter) {
+  public static void writeBatchPoint(BatchData batchData, int i, ChunkWriterImpl chunkWriter) {
     switch (chunkWriter.getDataType()) {
       case TEXT:
-        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getBinaryByIndex(i), false);
+        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getBinaryByIndex(i));
         break;
       case DOUBLE:
-        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getDoubleByIndex(i), false);
+        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getDoubleByIndex(i));
         break;
       case BOOLEAN:
-        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getBooleanByIndex(i), false);
+        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getBooleanByIndex(i));
         break;
       case INT64:
-        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getLongByIndex(i), false);
+        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getLongByIndex(i));
         break;
       case INT32:
-        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getIntByIndex(i), false);
+        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getIntByIndex(i));
         break;
       case FLOAT:
-        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getFloatByIndex(i), false);
+        chunkWriter.write(batchData.getTimeByIndex(i), batchData.getFloatByIndex(i));
         break;
       default:
         throw new UnsupportedOperationException("Unknown data type " + chunkWriter.getDataType());
diff --git a/server/src/test/java/org/apache/iotdb/db/engine/compaction/utils/CompactionFileGeneratorUtils.java b/server/src/test/java/org/apache/iotdb/db/engine/compaction/utils/CompactionFileGeneratorUtils.java
index 579c145..57b4b6b 100644
--- a/server/src/test/java/org/apache/iotdb/db/engine/compaction/utils/CompactionFileGeneratorUtils.java
+++ b/server/src/test/java/org/apache/iotdb/db/engine/compaction/utils/CompactionFileGeneratorUtils.java
@@ -32,7 +32,6 @@ import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.utils.Pair;
 import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
 import org.apache.iotdb.tsfile.write.schema.UnaryMeasurementSchema;
 import org.apache.iotdb.tsfile.write.writer.RestorableTsFileIOWriter;
 
@@ -126,11 +125,11 @@ public class CompactionFileGeneratorUtils {
       for (String sensor : deviceMeasurementEntry.getValue()) {
         long currTime = startTime;
         for (List<Long> chunk : chunkPagePointsNum) {
-          IChunkWriter chunkWriter =
+          ChunkWriterImpl chunkWriter =
               new ChunkWriterImpl(new UnaryMeasurementSchema(sensor, TSDataType.INT64), true);
           for (Long page : chunk) {
             for (long i = 0; i < page; i++) {
-              chunkWriter.write(currTime, currTime, false);
+              chunkWriter.write(currTime, currTime);
               newTsFileResource.updateStartTime(device, currTime);
               newTsFileResource.updateEndTime(device, currTime);
               currTime++;
@@ -183,12 +182,12 @@ public class CompactionFileGeneratorUtils {
       writer.startChunkGroup(device);
       for (String sensor : deviceMeasurementEntry.getValue()) {
         List<long[][]> chunks = chunkPagePointsNum.get(currChunksIndex);
-        IChunkWriter chunkWriter =
+        ChunkWriterImpl chunkWriter =
             new ChunkWriterImpl(new UnaryMeasurementSchema(sensor, TSDataType.INT64), true);
         for (long[][] pages : chunks) {
           for (long[] starEndTime : pages) {
             for (long i = starEndTime[0]; i < starEndTime[1]; i++) {
-              chunkWriter.write(i, i, false);
+              chunkWriter.write(i, i);
               newTsFileResource.updateStartTime(device, i);
               newTsFileResource.updateEndTime(device, i);
             }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/AlignedChunkWriterImpl.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/AlignedChunkWriterImpl.java
index f466d80..0bd9bc5 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/AlignedChunkWriterImpl.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/AlignedChunkWriterImpl.java
@@ -63,37 +63,30 @@ public class AlignedChunkWriterImpl implements IChunkWriter {
     this.valueIndex = 0;
   }
 
-  @Override
   public void write(long time, int value, boolean isNull) {
     valueChunkWriterList.get(valueIndex++).write(time, value, isNull);
   }
 
-  @Override
   public void write(long time, long value, boolean isNull) {
     valueChunkWriterList.get(valueIndex++).write(time, value, isNull);
   }
 
-  @Override
   public void write(long time, boolean value, boolean isNull) {
     valueChunkWriterList.get(valueIndex++).write(time, value, isNull);
   }
 
-  @Override
   public void write(long time, float value, boolean isNull) {
     valueChunkWriterList.get(valueIndex++).write(time, value, isNull);
   }
 
-  @Override
   public void write(long time, double value, boolean isNull) {
     valueChunkWriterList.get(valueIndex++).write(time, value, isNull);
   }
 
-  @Override
   public void write(long time, Binary value, boolean isNull) {
     valueChunkWriterList.get(valueIndex++).write(time, value, isNull);
   }
 
-  @Override
   public void write(long time) {
     valueIndex = 0;
     timeChunkWriter.write(time);
@@ -102,37 +95,6 @@ public class AlignedChunkWriterImpl implements IChunkWriter {
     }
   }
 
-  // TODO tsfile write interface
-  @Override
-  public void write(long[] timestamps, int[] values, int batchSize) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public void write(long[] timestamps, long[] values, int batchSize) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public void write(long[] timestamps, boolean[] values, int batchSize) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public void write(long[] timestamps, float[] values, int batchSize) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public void write(long[] timestamps, double[] values, int batchSize) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public void write(long[] timestamps, Binary[] values, int batchSize) {
-    throw new UnsupportedOperationException();
-  }
-
   /**
    * check occupied memory size, if it exceeds the PageSize threshold, construct a page and put it
    * to pageBuffer
@@ -188,14 +150,4 @@ public class AlignedChunkWriterImpl implements IChunkWriter {
       valueChunkWriter.clearPageWriter();
     }
   }
-
-  @Override
-  public int getNumOfPages() {
-    return timeChunkWriter.getNumOfPages();
-  }
-
-  @Override
-  public TSDataType getDataType() {
-    return TSDataType.VECTOR;
-  }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkGroupWriterImpl.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkGroupWriterImpl.java
index b7ceb30..f7887e6 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkGroupWriterImpl.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkGroupWriterImpl.java
@@ -74,7 +74,7 @@ public class ChunkGroupWriterImpl implements IChunkGroupWriter {
         throw new NoMeasurementException(
             "time " + time + ", measurement id " + measurementId + " not found!");
       }
-      point.writeTo(time, chunkWriters.get(measurementId));
+      point.writeTo(time, (ChunkWriterImpl) chunkWriters.get(measurementId));
     }
   }
 
@@ -109,7 +109,8 @@ public class ChunkGroupWriterImpl implements IChunkGroupWriter {
     VectorMeasurementSchema vectorMeasurementSchema =
         (VectorMeasurementSchema) tablet.getSchemas().get(index);
     List<TSDataType> valueDataTypes = vectorMeasurementSchema.getSubMeasurementsTSDataTypeList();
-    IChunkWriter vectorChunkWriter = chunkWriters.get(measurement);
+    AlignedChunkWriterImpl vectorChunkWriter =
+        (AlignedChunkWriterImpl) chunkWriters.get(measurement);
     for (int row = 0; row < batchSize; row++) {
       long time = tablet.timestamps[row];
       for (int columnIndex = 0; columnIndex < valueDataTypes.size(); columnIndex++) {
@@ -161,33 +162,27 @@ public class ChunkGroupWriterImpl implements IChunkGroupWriter {
     int batchSize = tablet.rowSize;
     switch (dataType) {
       case INT32:
-        chunkWriters
-            .get(measurementId)
+        ((ChunkWriterImpl) chunkWriters.get(measurementId))
             .write(tablet.timestamps, (int[]) tablet.values[index], batchSize);
         break;
       case INT64:
-        chunkWriters
-            .get(measurementId)
+        ((ChunkWriterImpl) chunkWriters.get(measurementId))
             .write(tablet.timestamps, (long[]) tablet.values[index], batchSize);
         break;
       case FLOAT:
-        chunkWriters
-            .get(measurementId)
+        ((ChunkWriterImpl) chunkWriters.get(measurementId))
             .write(tablet.timestamps, (float[]) tablet.values[index], batchSize);
         break;
       case DOUBLE:
-        chunkWriters
-            .get(measurementId)
+        ((ChunkWriterImpl) chunkWriters.get(measurementId))
             .write(tablet.timestamps, (double[]) tablet.values[index], batchSize);
         break;
       case BOOLEAN:
-        chunkWriters
-            .get(measurementId)
+        ((ChunkWriterImpl) chunkWriters.get(measurementId))
             .write(tablet.timestamps, (boolean[]) tablet.values[index], batchSize);
         break;
       case TEXT:
-        chunkWriters
-            .get(measurementId)
+        ((ChunkWriterImpl) chunkWriters.get(measurementId))
             .write(tablet.timestamps, (Binary[]) tablet.values[index], batchSize);
         break;
       default:
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
index f93cad3..d603c58 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
@@ -46,12 +46,12 @@ public class ChunkWriterImpl implements IChunkWriter {
 
   private static final Logger logger = LoggerFactory.getLogger(ChunkWriterImpl.class);
 
-  private IMeasurementSchema measurementSchema;
+  private final IMeasurementSchema measurementSchema;
 
-  private ICompressor compressor;
+  private final ICompressor compressor;
 
   /** all pages of this chunk. */
-  private PublicBAOS pageBuffer;
+  private final PublicBAOS pageBuffer;
 
   private int numOfPages;
 
@@ -145,8 +145,7 @@ public class ChunkWriterImpl implements IChunkWriter {
     }
   }
 
-  @Override
-  public void write(long time, long value, boolean isNull) {
+  public void write(long time, long value) {
     // store last point for sdtEncoding, it still needs to go through encoding process
     // in case it exceeds compdev and needs to store second last point
     if (!isSdtEncoding || sdtEncoder.encodeLong(time, value)) {
@@ -160,8 +159,7 @@ public class ChunkWriterImpl implements IChunkWriter {
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
-  public void write(long time, int value, boolean isNull) {
+  public void write(long time, int value) {
     if (!isSdtEncoding || sdtEncoder.encodeInt(time, value)) {
       pageWriter.write(
           isSdtEncoding ? sdtEncoder.getTime() : time,
@@ -173,14 +171,12 @@ public class ChunkWriterImpl implements IChunkWriter {
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
-  public void write(long time, boolean value, boolean isNull) {
+  public void write(long time, boolean value) {
     pageWriter.write(time, value);
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
-  public void write(long time, float value, boolean isNull) {
+  public void write(long time, float value) {
     if (!isSdtEncoding || sdtEncoder.encodeFloat(time, value)) {
       pageWriter.write(
           isSdtEncoding ? sdtEncoder.getTime() : time,
@@ -193,8 +189,7 @@ public class ChunkWriterImpl implements IChunkWriter {
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
-  public void write(long time, double value, boolean isNull) {
+  public void write(long time, double value) {
     if (!isSdtEncoding || sdtEncoder.encodeDouble(time, value)) {
       pageWriter.write(
           isSdtEncoding ? sdtEncoder.getTime() : time,
@@ -206,18 +201,11 @@ public class ChunkWriterImpl implements IChunkWriter {
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
-  public void write(long time, Binary value, boolean isNull) {
+  public void write(long time, Binary value) {
     pageWriter.write(time, value);
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
-  public void write(long time) {
-    throw new IllegalStateException("write time method is not implemented in common chunk writer");
-  }
-
-  @Override
   public void write(long[] timestamps, int[] values, int batchSize) {
     if (isSdtEncoding) {
       batchSize = sdtEncoder.encode(timestamps, values, batchSize);
@@ -226,7 +214,6 @@ public class ChunkWriterImpl implements IChunkWriter {
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
   public void write(long[] timestamps, long[] values, int batchSize) {
     if (isSdtEncoding) {
       batchSize = sdtEncoder.encode(timestamps, values, batchSize);
@@ -235,13 +222,11 @@ public class ChunkWriterImpl implements IChunkWriter {
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
   public void write(long[] timestamps, boolean[] values, int batchSize) {
     pageWriter.write(timestamps, values, batchSize);
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
   public void write(long[] timestamps, float[] values, int batchSize) {
     if (isSdtEncoding) {
       batchSize = sdtEncoder.encode(timestamps, values, batchSize);
@@ -250,7 +235,6 @@ public class ChunkWriterImpl implements IChunkWriter {
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
   public void write(long[] timestamps, double[] values, int batchSize) {
     if (isSdtEncoding) {
       batchSize = sdtEncoder.encode(timestamps, values, batchSize);
@@ -259,7 +243,6 @@ public class ChunkWriterImpl implements IChunkWriter {
     checkPageSizeAndMayOpenANewPage();
   }
 
-  @Override
   public void write(long[] timestamps, Binary[] values, int batchSize) {
     pageWriter.write(timestamps, values, batchSize);
     checkPageSizeAndMayOpenANewPage();
@@ -365,12 +348,6 @@ public class ChunkWriterImpl implements IChunkWriter {
     pageWriter = null;
   }
 
-  @Override
-  public int getNumOfPages() {
-    return numOfPages;
-  }
-
-  @Override
   public TSDataType getDataType() {
     return measurementSchema.getType();
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/IChunkWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/IChunkWriter.java
index fcef96a..992862e 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/IChunkWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/IChunkWriter.java
@@ -18,8 +18,6 @@
  */
 package org.apache.iotdb.tsfile.write.chunk;
 
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.utils.Binary;
 import org.apache.iotdb.tsfile.write.writer.TsFileIOWriter;
 
 import java.io.IOException;
@@ -27,45 +25,6 @@ import java.io.IOException;
 /** IChunkWriter provides a list of writing methods for different value types. */
 public interface IChunkWriter {
 
-  /** write a time value pair. */
-  void write(long time, int value, boolean isNull);
-
-  /** write a time value pair. */
-  void write(long time, long value, boolean isNull);
-
-  /** write a time value pair. */
-  void write(long time, boolean value, boolean isNull);
-
-  /** write a time value pair. */
-  void write(long time, float value, boolean isNull);
-
-  /** write a time value pair. */
-  void write(long time, double value, boolean isNull);
-
-  /** write a time value pair. */
-  void write(long time, Binary value, boolean isNull);
-
-  /** write a time. */
-  void write(long time);
-
-  /** write time series */
-  void write(long[] timestamps, int[] values, int batchSize);
-
-  /** write time series */
-  void write(long[] timestamps, long[] values, int batchSize);
-
-  /** write time series */
-  void write(long[] timestamps, boolean[] values, int batchSize);
-
-  /** write time series */
-  void write(long[] timestamps, float[] values, int batchSize);
-
-  /** write time series */
-  void write(long[] timestamps, double[] values, int batchSize);
-
-  /** write time series */
-  void write(long[] timestamps, Binary[] values, int batchSize);
-
   /** flush data to TsFileIOWriter. */
   void writeToFileWriter(TsFileIOWriter tsfileWriter) throws IOException;
 
@@ -86,8 +45,4 @@ public interface IChunkWriter {
 
   /** set the current pageWriter to null, friendly for gc */
   void clearPageWriter();
-
-  int getNumOfPages();
-
-  TSDataType getDataType();
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/BooleanDataPoint.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/BooleanDataPoint.java
index e8b4bee..62cfdd4 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/BooleanDataPoint.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/BooleanDataPoint.java
@@ -19,7 +19,7 @@
 package org.apache.iotdb.tsfile.write.record.datapoint;
 
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,12 +42,12 @@ public class BooleanDataPoint extends DataPoint {
   }
 
   @Override
-  public void writeTo(long time, IChunkWriter writer) {
+  public void writeTo(long time, ChunkWriterImpl writer) {
     if (writer == null) {
       LOG.warn("given IChunkWriter is null, do nothing and return");
       return;
     }
-    writer.write(time, value, false);
+    writer.write(time, value);
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/DataPoint.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/DataPoint.java
index 8497651..a452fc8 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/DataPoint.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/DataPoint.java
@@ -23,7 +23,7 @@ import org.apache.iotdb.tsfile.exception.write.UnSupportedDataTypeException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.utils.Binary;
 import org.apache.iotdb.tsfile.utils.StringContainer;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
 
 import java.io.IOException;
 import java.math.BigDecimal;
@@ -101,7 +101,7 @@ public abstract class DataPoint {
    * @param writer writer
    * @throws IOException exception in IO
    */
-  public abstract void writeTo(long time, IChunkWriter writer) throws IOException;
+  public abstract void writeTo(long time, ChunkWriterImpl writer) throws IOException;
 
   public String getMeasurementId() {
     return measurementId;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/DoubleDataPoint.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/DoubleDataPoint.java
index 853313e..ed00375 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/DoubleDataPoint.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/DoubleDataPoint.java
@@ -19,7 +19,7 @@
 package org.apache.iotdb.tsfile.write.record.datapoint;
 
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,12 +42,12 @@ public class DoubleDataPoint extends DataPoint {
   }
 
   @Override
-  public void writeTo(long time, IChunkWriter writer) {
+  public void writeTo(long time, ChunkWriterImpl writer) {
     if (writer == null) {
       LOG.warn("given IChunkWriter is null, do nothing and return");
       return;
     }
-    writer.write(time, value, false);
+    writer.write(time, value);
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/FloatDataPoint.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/FloatDataPoint.java
index 863be98..f00bdbe 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/FloatDataPoint.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/FloatDataPoint.java
@@ -19,7 +19,7 @@
 package org.apache.iotdb.tsfile.write.record.datapoint;
 
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,12 +42,12 @@ public class FloatDataPoint extends DataPoint {
   }
 
   @Override
-  public void writeTo(long time, IChunkWriter writer) {
+  public void writeTo(long time, ChunkWriterImpl writer) {
     if (writer == null) {
       LOG.warn("given IChunkWriter is null, do nothing and return");
       return;
     }
-    writer.write(time, value, false);
+    writer.write(time, value);
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/IntDataPoint.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/IntDataPoint.java
index 02e0d5c..14bab49 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/IntDataPoint.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/IntDataPoint.java
@@ -19,7 +19,7 @@
 package org.apache.iotdb.tsfile.write.record.datapoint;
 
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,12 +42,12 @@ public class IntDataPoint extends DataPoint {
   }
 
   @Override
-  public void writeTo(long time, IChunkWriter writer) {
+  public void writeTo(long time, ChunkWriterImpl writer) {
     if (writer == null) {
       LOG.warn("given IChunkWriter is null, do nothing and return");
       return;
     }
-    writer.write(time, value, false);
+    writer.write(time, value);
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/LongDataPoint.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/LongDataPoint.java
index 8dce510..f2549f5 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/LongDataPoint.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/LongDataPoint.java
@@ -19,7 +19,7 @@
 package org.apache.iotdb.tsfile.write.record.datapoint;
 
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,12 +42,12 @@ public class LongDataPoint extends DataPoint {
   }
 
   @Override
-  public void writeTo(long time, IChunkWriter writer) {
+  public void writeTo(long time, ChunkWriterImpl writer) {
     if (writer == null) {
       LOG.warn("given IChunkWriter is null, do nothing and return");
       return;
     }
-    writer.write(time, value, false);
+    writer.write(time, value);
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/StringDataPoint.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/StringDataPoint.java
index cf371bc..72c019b 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/StringDataPoint.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/record/datapoint/StringDataPoint.java
@@ -20,7 +20,7 @@ package org.apache.iotdb.tsfile.write.record.datapoint;
 
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.utils.Binary;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -43,12 +43,12 @@ public class StringDataPoint extends DataPoint {
   }
 
   @Override
-  public void writeTo(long time, IChunkWriter writer) {
+  public void writeTo(long time, ChunkWriterImpl writer) {
     if (writer == null) {
       LOG.warn("given IChunkWriter is null, do nothing and return");
       return;
     }
-    writer.write(time, value, false);
+    writer.write(time, value);
   }
 
   @Override