You are viewing a plain text version of this content. The canonical (HTML) version is available in the Apache mailing list archives.
Posted to commits@iotdb.apache.org by ja...@apache.org on 2020/12/02 08:04:24 UTC

[iotdb] branch NewTsFile created (now 0b153b2)

This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a change to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git.


      at 0b153b2  fix bug

This branch includes the following new commits:

     new 3d01eec  init
     new 314c38a  change some
     new f51296c  some changes
     new 6b8a060  next is to delete Edian type
     new 5470d7c  change encode and decode way
     new c0a7ee3  fix compile error
     new e84b33e  debug
     new 57bc1b6  some changes
     new 50b9da4  have a good day
     new d5c34e8  fuck bug day
     new 0b153b2  fix bug

The 11 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[iotdb] 06/11: fix compile error

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit c0a7ee3b94c1d67a01ba74a1a5209fdadb0f6d48
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Thu Nov 26 11:54:46 2020 +0800

    fix compile error
---
 .../iotdb/db/engine/storagegroup/StorageGroupProcessor.java | 11 +++++------
 .../apache/iotdb/db/engine/storagegroup/TsFileResource.java | 13 +++++--------
 .../apache/iotdb/db/query/aggregation/AggregateResult.java  |  2 +-
 .../iotdb/db/query/aggregation/impl/AvgAggrResult.java      | 12 ++++++++++--
 .../iotdb/db/query/aggregation/impl/SumAggrResult.java      | 10 ++++++++--
 .../impl/FixLengthIExternalSortFileDeserializer.java        |  2 +-
 6 files changed, 30 insertions(+), 20 deletions(-)

diff --git a/server/src/main/java/org/apache/iotdb/db/engine/storagegroup/StorageGroupProcessor.java b/server/src/main/java/org/apache/iotdb/db/engine/storagegroup/StorageGroupProcessor.java
index 22ddad1..7b8221f 100755
--- a/server/src/main/java/org/apache/iotdb/db/engine/storagegroup/StorageGroupProcessor.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/storagegroup/StorageGroupProcessor.java
@@ -80,7 +80,6 @@ import org.apache.iotdb.db.qp.physical.crud.InsertTabletPlan;
 import org.apache.iotdb.db.query.context.QueryContext;
 import org.apache.iotdb.db.query.control.QueryFileManager;
 import org.apache.iotdb.db.service.IoTDB;
-import org.apache.iotdb.db.service.UpgradeSevice;
 import org.apache.iotdb.db.utils.CopyOnReadLinkedList;
 import org.apache.iotdb.db.writelog.recover.TsFileRecoverPerformer;
 import org.apache.iotdb.rpc.RpcUtils;
@@ -1738,12 +1737,12 @@ public class StorageGroupProcessor {
     for (TsFileResource seqTsFileResource : upgradeSeqFileList) {
       seqTsFileResource.setSeq(true);
       seqTsFileResource.setUpgradeTsFileResourceCallBack(this::upgradeTsFileResourceCallBack);
-      seqTsFileResource.doUpgrade();
+//      seqTsFileResource.doUpgrade();
     }
     for (TsFileResource unseqTsFileResource : upgradeUnseqFileList) {
       unseqTsFileResource.setSeq(false);
       unseqTsFileResource.setUpgradeTsFileResourceCallBack(this::upgradeTsFileResourceCallBack);
-      unseqTsFileResource.doUpgrade();
+//      unseqTsFileResource.doUpgrade();
     }
   }
 
@@ -1787,9 +1786,9 @@ public class StorageGroupProcessor {
           }
         }
       }
-      if (StorageEngine.getInstance().countUpgradeFiles() == 0) {
-        UpgradeSevice.getINSTANCE().stop();
-      }
+//      if (StorageEngine.getInstance().countUpgradeFiles() == 0) {
+//        UpgradeSevice.getINSTANCE().stop();
+//      }
     }
   }
 
diff --git a/server/src/main/java/org/apache/iotdb/db/engine/storagegroup/TsFileResource.java b/server/src/main/java/org/apache/iotdb/db/engine/storagegroup/TsFileResource.java
index 58f37b3..5f338ae 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/storagegroup/TsFileResource.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/storagegroup/TsFileResource.java
@@ -42,12 +42,9 @@ import org.apache.iotdb.db.engine.StorageEngine;
 import org.apache.iotdb.db.engine.modification.ModificationFile;
 import org.apache.iotdb.db.engine.querycontext.ReadOnlyMemChunk;
 import org.apache.iotdb.db.engine.storagegroup.StorageGroupProcessor.UpgradeTsFileResourceCallBack;
-import org.apache.iotdb.db.engine.upgrade.UpgradeTask;
 import org.apache.iotdb.db.exception.PartitionViolationException;
 import org.apache.iotdb.db.rescon.CachedStringPool;
-import org.apache.iotdb.db.service.UpgradeSevice;
 import org.apache.iotdb.db.utils.FilePathUtils;
-import org.apache.iotdb.db.utils.UpgradeUtils;
 import org.apache.iotdb.tsfile.common.constant.TsFileConstant;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
@@ -570,11 +567,11 @@ public class TsFileResource {
     return tsFileLock.tryWriteLock();
   }
 
-  void doUpgrade() {
-    if (UpgradeUtils.isNeedUpgrade(this)) {
-      UpgradeSevice.getINSTANCE().submitUpgradeTask(new UpgradeTask(this));
-    }
-  }
+//  void doUpgrade() {
+//    if (UpgradeUtils.isNeedUpgrade(this)) {
+//      UpgradeSevice.getINSTANCE().submitUpgradeTask(new UpgradeTask(this));
+//    }
+//  }
 
   public void removeModFile() throws IOException {
     getModFile().remove();
diff --git a/server/src/main/java/org/apache/iotdb/db/query/aggregation/AggregateResult.java b/server/src/main/java/org/apache/iotdb/db/query/aggregation/AggregateResult.java
index 840fd68..fe84d34 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/aggregation/AggregateResult.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/aggregation/AggregateResult.java
@@ -108,7 +108,7 @@ public abstract class AggregateResult {
 
   public static AggregateResult deserializeFrom(ByteBuffer buffer) {
     AggregationType aggregationType = AggregationType.deserialize(buffer);
-    TSDataType dataType = TSDataType.deserialize(buffer.getShort());
+    TSDataType dataType = TSDataType.deserialize(buffer.get());
     boolean ascending = ReadWriteIOUtils.readBool(buffer);
     AggregateResult aggregateResult = AggregateResultFactory
         .getAggrResultByType(aggregationType, dataType, ascending);
diff --git a/server/src/main/java/org/apache/iotdb/db/query/aggregation/impl/AvgAggrResult.java b/server/src/main/java/org/apache/iotdb/db/query/aggregation/impl/AvgAggrResult.java
index 526e41c..4120846 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/aggregation/impl/AvgAggrResult.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/aggregation/impl/AvgAggrResult.java
@@ -27,6 +27,8 @@ import org.apache.iotdb.db.query.aggregation.AggregationType;
 import org.apache.iotdb.db.query.reader.series.IReaderByTimestamp;
 import org.apache.iotdb.tsfile.exception.filter.StatisticsClassException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.file.metadata.statistics.BooleanStatistics;
+import org.apache.iotdb.tsfile.file.metadata.statistics.IntegerStatistics;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
 import org.apache.iotdb.tsfile.read.common.BatchData;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
@@ -68,8 +70,14 @@ public class AvgAggrResult extends AggregateResult {
       throw new StatisticsClassException("Binary statistics does not support: avg");
     }
     cnt += statistics.getCount();
+    double sum;
+    if (statistics instanceof IntegerStatistics || statistics instanceof BooleanStatistics) {
+      sum = statistics.getSumLongValue();
+    } else {
+      sum = statistics.getSumDoubleValue();
+    }
     avg = avg * ((double) preCnt / cnt) + ((double) statistics.getCount() / cnt)
-        * statistics.getSumValue() / statistics.getCount();
+        * sum / statistics.getCount();
   }
 
   @Override
@@ -144,7 +152,7 @@ public class AvgAggrResult extends AggregateResult {
 
   @Override
   protected void deserializeSpecificFields(ByteBuffer buffer) {
-    this.seriesDataType = TSDataType.deserialize(buffer.getShort());
+    this.seriesDataType = TSDataType.deserialize(buffer.get());
     this.avg = buffer.getDouble();
     this.cnt = buffer.getLong();
   }
diff --git a/server/src/main/java/org/apache/iotdb/db/query/aggregation/impl/SumAggrResult.java b/server/src/main/java/org/apache/iotdb/db/query/aggregation/impl/SumAggrResult.java
index 78e8ad4..d51b83e 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/aggregation/impl/SumAggrResult.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/aggregation/impl/SumAggrResult.java
@@ -26,6 +26,8 @@ import org.apache.iotdb.db.query.aggregation.AggregateResult;
 import org.apache.iotdb.db.query.aggregation.AggregationType;
 import org.apache.iotdb.db.query.reader.series.IReaderByTimestamp;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.file.metadata.statistics.BooleanStatistics;
+import org.apache.iotdb.tsfile.file.metadata.statistics.IntegerStatistics;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
 import org.apache.iotdb.tsfile.read.common.BatchData;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
@@ -49,7 +51,11 @@ public class SumAggrResult extends AggregateResult {
   @Override
   public void updateResultFromStatistics(Statistics statistics) {
     double preValue = getDoubleValue();
-    preValue += statistics.getSumValue();
+    if (statistics instanceof IntegerStatistics || statistics instanceof BooleanStatistics) {
+      preValue += statistics.getSumLongValue();
+    } else {
+      preValue += statistics.getSumDoubleValue();
+    }
     setDoubleValue(preValue);
   }
 
@@ -118,7 +124,7 @@ public class SumAggrResult extends AggregateResult {
 
   @Override
   protected void deserializeSpecificFields(ByteBuffer buffer) {
-    seriesDataType = TSDataType.deserialize(buffer.getShort());
+    seriesDataType = TSDataType.deserialize(buffer.get());
   }
 
   @Override
diff --git a/server/src/main/java/org/apache/iotdb/db/query/externalsort/serialize/impl/FixLengthIExternalSortFileDeserializer.java b/server/src/main/java/org/apache/iotdb/db/query/externalsort/serialize/impl/FixLengthIExternalSortFileDeserializer.java
index b87a852..ed8b4d1 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/externalsort/serialize/impl/FixLengthIExternalSortFileDeserializer.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/externalsort/serialize/impl/FixLengthIExternalSortFileDeserializer.java
@@ -83,7 +83,7 @@ public class FixLengthIExternalSortFileDeserializer implements IExternalSortFile
   }
 
   private TSDataType readHeader() throws IOException {
-    return TSDataType.deserialize(ReadWriteIOUtils.readShort(inputStream));
+    return TSDataType.deserialize(ReadWriteIOUtils.readByte(inputStream));
   }
 
   private void setReader(TSDataType type) {


[iotdb] 09/11: have a good day

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 50b9da45446e553f89adcf1b106f8e391ce34474
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Mon Nov 30 20:59:58 2020 +0800

    have a good day
---
 .../apache/iotdb/tsfile/TsFileSequenceRead.java    |  40 ++++---
 .../iotdb/db/qp/physical/crud/InsertRowPlan.java   |   4 +-
 .../db/query/reader/series/SeriesReaderTest.java   |  19 ++-
 .../db/writelog/recover/SeqTsFileRecoverTest.java  |   2 +-
 .../iotdb/tsfile/file/header/ChunkHeader.java      |  16 ++-
 .../iotdb/tsfile/file/header/PageHeader.java       |  19 ++-
 .../iotdb/tsfile/read/TsFileSequenceReader.java    | 130 ++++++++++++++-------
 .../tsfile/read/reader/chunk/ChunkReader.java      |   2 +-
 .../apache/iotdb/tsfile/write/TsFileWriter.java    |   2 +-
 .../iotdb/tsfile/write/chunk/ChunkWriterImpl.java  |   4 +-
 .../iotdb/tsfile/file/header/PageHeaderTest.java   |   2 +-
 .../tsfile/read/TsFileSequenceReaderTest.java      |   3 +-
 12 files changed, 158 insertions(+), 85 deletions(-)

diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
index e9314fa..93ed9f0 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
@@ -41,18 +41,19 @@ public class TsFileSequenceRead {
 
   @SuppressWarnings("squid:S3776") // Suppress high Cognitive Complexity warning
   public static void main(String[] args) throws IOException {
-    String filename = "test.tsfile";
+    String filename = "/Users/jackietien/Desktop/1-1-1-after.tsfile";
     if (args.length >= 1) {
       filename = args[0];
     }
     try (TsFileSequenceReader reader = new TsFileSequenceReader(filename)) {
-      System.out.println("file length: " + FSFactoryProducer.getFSFactory().getFile(filename).length());
+      System.out
+          .println("file length: " + FSFactoryProducer.getFSFactory().getFile(filename).length());
       System.out.println("file magic head: " + reader.readHeadMagic());
       System.out.println("file magic tail: " + reader.readTailMagic());
       System.out.println("Level 1 metadata position: " + reader.getFileMetadataPos());
       System.out.println("Level 1 metadata size: " + reader.getFileMetadataSize());
       // Sequential reading of one ChunkGroup now follows this order:
-      // first SeriesChunks (headers and data) in one ChunkGroup, then the CHUNK_GROUP_FOOTER
+      // first the CHUNK_GROUP_HEADER, then SeriesChunks (headers and data) in one ChunkGroup
       // Because we do not know how many chunks a ChunkGroup may have, we should read one byte (the marker) ahead and
       // judge accordingly.
       reader.position((long) TSFileConfig.MAGIC_STRING.getBytes().length + 1);
@@ -68,32 +69,39 @@ public class TsFileSequenceRead {
             ChunkHeader header = reader.readChunkHeader(marker);
             System.out.println("\tMeasurement: " + header.getMeasurementID());
             Decoder defaultTimeDecoder = Decoder.getDecoderByType(
-                    TSEncoding.valueOf(TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
-                    TSDataType.INT64);
+                TSEncoding.valueOf(TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
+                TSDataType.INT64);
             Decoder valueDecoder = Decoder
-                    .getDecoderByType(header.getEncodingType(), header.getDataType());
-            for (int j = 0; j < header.getNumOfPages(); j++) {
+                .getDecoderByType(header.getEncodingType(), header.getDataType());
+            int dataSize = header.getDataSize();
+            while (dataSize > 0) {
               valueDecoder.reset();
               System.out.println("\t\t[Page]\n \t\tPage head position: " + reader.position());
-              PageHeader pageHeader = reader.readPageHeader(header.getDataType());
+              PageHeader pageHeader = reader.readPageHeader(header.getDataType(),
+                  header.getChunkType() == MetaMarker.CHUNK_HEADER);
               System.out.println("\t\tPage data position: " + reader.position());
-              System.out.println("\t\tpoints in the page: " + pageHeader.getNumOfValues());
               ByteBuffer pageData = reader.readPage(pageHeader, header.getCompressionType());
               System.out
-                      .println("\t\tUncompressed page data size: " + pageHeader.getUncompressedSize());
+                  .println("\t\tUncompressed page data size: " + pageHeader.getUncompressedSize());
               PageReader reader1 = new PageReader(pageData, header.getDataType(), valueDecoder,
-                      defaultTimeDecoder, null);
+                  defaultTimeDecoder, null);
               BatchData batchData = reader1.getAllSatisfiedPageData();
+              if (header.getChunkType() == MetaMarker.CHUNK_HEADER) {
+                System.out.println("\t\tpoints in the page: " + pageHeader.getNumOfValues());
+              } else {
+                System.out.println("\t\tpoints in the page: " + batchData.length());
+              }
               while (batchData.hasCurrent()) {
                 System.out.println(
-                        "\t\t\ttime, value: " + batchData.currentTime() + ", " + batchData
-                                .currentValue());
+                    "\t\t\ttime, value: " + batchData.currentTime() + ", " + batchData
+                        .currentValue());
                 batchData.next();
               }
+              dataSize -= pageHeader.getSerializedPageSize();
             }
             break;
           case MetaMarker.CHUNK_GROUP_HEADER:
-            System.out.println("Chunk Group Footer position: " + reader.position());
+            System.out.println("Chunk Group Header position: " + reader.position());
             ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupHeader();
             System.out.println("device: " + chunkGroupHeader.getDeviceID());
             break;
@@ -108,8 +116,8 @@ public class TsFileSequenceRead {
       System.out.println("[Metadata]");
       for (String device : reader.getAllDevices()) {
         Map<String, List<ChunkMetadata>> seriesMetaData = reader.readChunkMetadataInDevice(device);
-        System.out.println(String
-                .format("\t[Device]Device %s, Number of Measurements %d", device, seriesMetaData.size()));
+        System.out.printf("\t[Device]Device %s, Number of Measurements %d%n", device,
+            seriesMetaData.size());
         for (Map.Entry<String, List<ChunkMetadata>> serie : seriesMetaData.entrySet()) {
           System.out.println("\t\tMeasurement:" + serie.getKey());
           for (ChunkMetadata chunkMetadata : serie.getValue()) {
diff --git a/server/src/main/java/org/apache/iotdb/db/qp/physical/crud/InsertRowPlan.java b/server/src/main/java/org/apache/iotdb/db/qp/physical/crud/InsertRowPlan.java
index 47f5afa..8c624cd 100644
--- a/server/src/main/java/org/apache/iotdb/db/qp/physical/crud/InsertRowPlan.java
+++ b/server/src/main/java/org/apache/iotdb/db/qp/physical/crud/InsertRowPlan.java
@@ -50,7 +50,7 @@ import org.slf4j.LoggerFactory;
 public class InsertRowPlan extends InsertPlan {
 
   private static final Logger logger = LoggerFactory.getLogger(InsertRowPlan.class);
-  private static final short TYPE_RAW_STRING = -1;
+  private static final byte TYPE_RAW_STRING = -1;
 
   private long time;
   private Object[] values;
@@ -357,7 +357,7 @@ public class InsertRowPlan extends InsertPlan {
     for (int i = 0; i < measurements.length; i++) {
       // types are not determined, the situation mainly occurs when the plan uses string values
       // and is forwarded to other nodes
-      short typeNum = ReadWriteIOUtils.readShort(buffer);
+      byte typeNum = (byte) ReadWriteIOUtils.read(buffer);
       if (typeNum == TYPE_RAW_STRING) {
         values[i] = ReadWriteIOUtils.readString(buffer);
         continue;
diff --git a/server/src/test/java/org/apache/iotdb/db/query/reader/series/SeriesReaderTest.java b/server/src/test/java/org/apache/iotdb/db/query/reader/series/SeriesReaderTest.java
index 9474a9b..4a9267f 100644
--- a/server/src/test/java/org/apache/iotdb/db/query/reader/series/SeriesReaderTest.java
+++ b/server/src/test/java/org/apache/iotdb/db/query/reader/series/SeriesReaderTest.java
@@ -19,13 +19,20 @@
 
 package org.apache.iotdb.db.query.reader.series;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import org.apache.iotdb.db.engine.storagegroup.TsFileResource;
 import org.apache.iotdb.db.exception.StorageEngineException;
 import org.apache.iotdb.db.exception.metadata.IllegalPathException;
 import org.apache.iotdb.db.exception.metadata.MetadataException;
 import org.apache.iotdb.db.metadata.PartialPath;
 import org.apache.iotdb.db.query.context.QueryContext;
-import org.apache.iotdb.db.utils.TestOnly;
 import org.apache.iotdb.tsfile.exception.write.WriteProcessException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.read.TimeValuePair;
@@ -37,15 +44,6 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
 public class SeriesReaderTest {
 
   private static final String SERIES_READER_TEST_SG = "root.seriesReaderTest";
@@ -144,7 +142,6 @@ public class SeriesReaderTest {
       long expectedTime = 499;
       while (pointReader.hasNextTimeValuePair()) {
         TimeValuePair timeValuePair = pointReader.nextTimeValuePair();
-        System.out.println(timeValuePair);
         assertEquals(expectedTime, timeValuePair.getTimestamp());
         int value = timeValuePair.getValue().getInt();
         if (expectedTime < 200) {
diff --git a/server/src/test/java/org/apache/iotdb/db/writelog/recover/SeqTsFileRecoverTest.java b/server/src/test/java/org/apache/iotdb/db/writelog/recover/SeqTsFileRecoverTest.java
index d550f60..5a98ab2 100644
--- a/server/src/test/java/org/apache/iotdb/db/writelog/recover/SeqTsFileRecoverTest.java
+++ b/server/src/test/java/org/apache/iotdb/db/writelog/recover/SeqTsFileRecoverTest.java
@@ -73,7 +73,6 @@ public class SeqTsFileRecoverTest {
   private WriteLogNode node;
 
   private String logNodePrefix = TestConstant.BASE_OUTPUT_PATH.concat("testRecover");
-  private String storageGroup = "target";
   private TsFileResource resource;
   private VersionController versionController = new VersionController() {
     private int i;
@@ -136,6 +135,7 @@ public class SeqTsFileRecoverTest {
       }
     }
     writer.flushAllChunkGroups();
+    writer.writeVersion(0);
     writer.getIOWriter().close();
 
     node = MultiFileLogNodeManager.getInstance().getNode(logNodePrefix + tsF.getName());
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
index 62267e6..04942d8 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
@@ -81,9 +81,10 @@ public class ChunkHeader {
    */
   public static int getSerializedSize(String measurementID, int dataSize) {
     int measurementIdLength = measurementID.getBytes(TSFileConfig.STRING_CHARSET).length;
-    return ReadWriteForEncodingUtils.varIntSize(measurementIdLength) // measurementID length
+    return Byte.BYTES // chunkType
+        + ReadWriteForEncodingUtils.varIntSize(measurementIdLength) // measurementID length
         + measurementIdLength // measurementID
-        + ReadWriteForEncodingUtils.varIntSize(dataSize) // dataSize
+        + ReadWriteForEncodingUtils.uVarIntSize(dataSize) // dataSize
         + TSDataType.getSerializedSize() // dataType
         + CompressionType.getSerializedSize() // compressionType
         + TSEncoding.getSerializedSize(); // encodingType
@@ -96,9 +97,10 @@ public class ChunkHeader {
   public static int getSerializedSize(String measurementID) {
 
     int measurementIdLength = measurementID.getBytes(TSFileConfig.STRING_CHARSET).length;
-    return ReadWriteForEncodingUtils.varIntSize(measurementIdLength) // measurementID length
+    return  Byte.BYTES // chunkType
+        + ReadWriteForEncodingUtils.varIntSize(measurementIdLength) // measurementID length
         + measurementIdLength // measurementID
-        + Integer.BYTES + 1 // varInr dataSize
+        + Integer.BYTES + 1 // uVarInt dataSize
         + TSDataType.getSerializedSize() // dataType
         + CompressionType.getSerializedSize() // compressionType
         + TSEncoding.getSerializedSize(); // encodingType
@@ -142,7 +144,7 @@ public class ChunkHeader {
     CompressionType type = ReadWriteIOUtils.readCompressionType(buffer);
     TSEncoding encoding = ReadWriteIOUtils.readEncoding(buffer);
     chunkHeaderSize =
-        chunkHeaderSize - Integer.BYTES + ReadWriteForEncodingUtils.varIntSize(dataSize);
+        chunkHeaderSize - Integer.BYTES - 1 + ReadWriteForEncodingUtils.uVarIntSize(dataSize);
     return new ChunkHeader(chunkType, measurementID, dataSize, chunkHeaderSize, dataType, type,
         encoding);
   }
@@ -227,4 +229,8 @@ public class ChunkHeader {
   public byte getChunkType() {
     return chunkType;
   }
+
+  public void increasePageNums(int i) {
+    numOfPages += i;
+  }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
index 2c0acf9..a990430 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
@@ -47,11 +47,14 @@ public class PageHeader {
     return 2 * (Integer.BYTES + 1); // uncompressedSize, compressedSize
   }
 
-  public static PageHeader deserializeFrom(InputStream inputStream, TSDataType dataType)
-      throws IOException {
+  public static PageHeader deserializeFrom(InputStream inputStream, TSDataType dataType,
+      boolean hasStatistic) throws IOException {
     int uncompressedSize = ReadWriteForEncodingUtils.readUnsignedVarInt(inputStream);
     int compressedSize = ReadWriteForEncodingUtils.readUnsignedVarInt(inputStream);
-    Statistics statistics = Statistics.deserialize(inputStream, dataType);
+    Statistics statistics = null;
+    if (hasStatistic) {
+      statistics = Statistics.deserialize(inputStream, dataType);
+    }
     return new PageHeader(uncompressedSize, compressedSize, statistics);
   }
 
@@ -119,4 +122,14 @@ public class PageHeader {
   public void setModified(boolean modified) {
     this.modified = modified;
   }
+
+  /**
+   * max page header size without statistics
+   */
+  public int getSerializedPageSize() {
+    return ReadWriteForEncodingUtils.uVarIntSize(uncompressedSize)
+        + ReadWriteForEncodingUtils.uVarIntSize(compressedSize)
+        + (statistics == null ? 0 : statistics.getSerializedSize()) // page header
+        + compressedSize; // page data
+  }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index 5c7338d..c3d2c5a 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -38,6 +38,7 @@ import java.util.stream.Collectors;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.compress.IUnCompressor;
+import org.apache.iotdb.tsfile.encoding.decoder.Decoder;
 import org.apache.iotdb.tsfile.file.MetaMarker;
 import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
@@ -51,12 +52,15 @@ import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
 import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
 import org.apache.iotdb.tsfile.file.metadata.enums.MetadataIndexNodeType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
 import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
+import org.apache.iotdb.tsfile.read.common.BatchData;
 import org.apache.iotdb.tsfile.read.common.Chunk;
 import org.apache.iotdb.tsfile.read.common.Path;
 import org.apache.iotdb.tsfile.read.controller.MetadataQuerierByFileImpl;
 import org.apache.iotdb.tsfile.read.reader.TsFileInput;
+import org.apache.iotdb.tsfile.read.reader.page.PageReader;
 import org.apache.iotdb.tsfile.utils.BloomFilter;
 import org.apache.iotdb.tsfile.utils.Pair;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
@@ -75,7 +79,6 @@ public class TsFileSequenceReader implements AutoCloseable {
   private long fileMetadataPos;
   private int fileMetadataSize;
   private ByteBuffer markerBuffer = ByteBuffer.allocate(Byte.BYTES);
-  private int totalChunkNum;
   private TsFileMetadata tsFileMetaData;
   // device -> measurement -> TimeseriesMetadata
   private Map<String, Map<String, TimeseriesMetadata>> cachedDeviceMetadata = new ConcurrentHashMap<>();
@@ -206,10 +209,8 @@ public class TsFileSequenceReader implements AutoCloseable {
    * whether the file is a complete TsFile: only if the head magic and tail magic string exists.
    */
   public boolean isComplete() throws IOException {
-    return tsFileInput.size() >= TSFileConfig.MAGIC_STRING.getBytes().length * 2
-        + TSFileConfig.VERSION_NUMBER_V2.getBytes().length
-        && (readTailMagic().equals(readHeadMagic()) || readTailMagic()
-        .equals(TSFileConfig.VERSION_NUMBER_V1));
+    return tsFileInput.size() >= TSFileConfig.MAGIC_STRING.getBytes().length * 2 + Byte.BYTES
+        && (readTailMagic().equals(readHeadMagic()));
   }
 
   /**
@@ -763,8 +764,8 @@ public class TsFileSequenceReader implements AutoCloseable {
    *
    * @param type given tsfile data type
    */
-  public PageHeader readPageHeader(TSDataType type) throws IOException {
-    return PageHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), type);
+  public PageHeader readPageHeader(TSDataType type, boolean hasStatistic) throws IOException {
+    return PageHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), type, hasStatistic);
   }
 
   public long position() throws IOException {
@@ -902,11 +903,9 @@ public class TsFileSequenceReader implements AutoCloseable {
     long fileOffsetOfChunk;
 
     // ChunkMetadata of current ChunkGroup
-    List<ChunkMetadata> chunkMetadataList = null;
-    String deviceID;
+    List<ChunkMetadata> chunkMetadataList = new ArrayList<>();
 
-    int headerLength = TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
-        .getBytes().length;
+    int headerLength = TSFileConfig.MAGIC_STRING.getBytes().length + Byte.BYTES;
     if (fileSize < headerLength) {
       return TsFileCheckStatus.INCOMPATIBLE_FILE;
     }
@@ -924,22 +923,16 @@ public class TsFileSequenceReader implements AutoCloseable {
         return TsFileCheckStatus.COMPLETE_FILE;
       }
     }
-    boolean newChunkGroup = true;
     // not a complete file, we will recover it...
     long truncatedSize = headerLength;
     byte marker;
-    int chunkCnt = 0;
+    String lastDeviceId = null;
     List<MeasurementSchema> measurementSchemaList = new ArrayList<>();
     try {
       while ((marker = this.readMarker()) != MetaMarker.SEPARATOR) {
         switch (marker) {
           case MetaMarker.CHUNK_HEADER:
           case MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER:
-            // this is the first chunk of a new ChunkGroup.
-            if (newChunkGroup) {
-              newChunkGroup = false;
-              chunkMetadataList = new ArrayList<>();
-            }
             fileOffsetOfChunk = this.position() - 1;
             // if there is something wrong with a chunk, we will drop the whole ChunkGroup
             // as different chunks may be created by the same insertions(sqls), and partial
@@ -952,37 +945,96 @@ public class TsFileSequenceReader implements AutoCloseable {
             measurementSchemaList.add(measurementSchema);
             dataType = chunkHeader.getDataType();
             Statistics<?> chunkStatistics = Statistics.getStatsByType(dataType);
-            for (int j = 0; j < chunkHeader.getNumOfPages(); j++) {
-              // a new Page
-              PageHeader pageHeader = this.readPageHeader(chunkHeader.getDataType());
-              chunkStatistics.mergeStatistics(pageHeader.getStatistics());
-              this.skipPageData(pageHeader);
+            int dataSize = chunkHeader.getDataSize();
+            if (chunkHeader.getChunkType() == MetaMarker.CHUNK_HEADER) {
+              while (dataSize > 0) {
+                // a new Page
+                PageHeader pageHeader = this.readPageHeader(chunkHeader.getDataType(), true);
+                chunkStatistics.mergeStatistics(pageHeader.getStatistics());
+                this.skipPageData(pageHeader);
+                dataSize -= pageHeader.getSerializedPageSize();
+                chunkHeader.increasePageNums(1);
+              }
+            } else {
+              // only one page without statistics; iterate over each point to generate the statistics
+              PageHeader pageHeader = this.readPageHeader(chunkHeader.getDataType(), false);
+              Decoder valueDecoder = Decoder
+                  .getDecoderByType(chunkHeader.getEncodingType(), chunkHeader.getDataType());
+              ByteBuffer pageData = readPage(pageHeader, chunkHeader.getCompressionType());
+              Decoder timeDecoder = Decoder.getDecoderByType(
+                  TSEncoding.valueOf(TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
+                  TSDataType.INT64);
+              PageReader reader = new PageReader(pageHeader, pageData, chunkHeader.getDataType(),
+                  valueDecoder, timeDecoder, null);
+              BatchData batchData = reader.getAllSatisfiedPageData();
+              while (batchData.hasCurrent()) {
+                switch (dataType) {
+                  case INT32:
+                    chunkStatistics.update(batchData.currentTime(), batchData.getInt());
+                    break;
+                  case INT64:
+                    chunkStatistics.update(batchData.currentTime(), batchData.getLong());
+                    break;
+                  case FLOAT:
+                    chunkStatistics.update(batchData.currentTime(), batchData.getFloat());
+                    break;
+                  case DOUBLE:
+                    chunkStatistics.update(batchData.currentTime(), batchData.getDouble());
+                    break;
+                  case BOOLEAN:
+                    chunkStatistics.update(batchData.currentTime(), batchData.getBoolean());
+                    break;
+                  case TEXT:
+                    chunkStatistics.update(batchData.currentTime(), batchData.getBinary());
+                    break;
+                  default:
+                    throw new IOException("Unexpected type " + dataType);
+                }
+                batchData.next();
+              }
+              chunkHeader.increasePageNums(1);
             }
             currentChunk = new ChunkMetadata(measurementID, dataType, fileOffsetOfChunk,
                 chunkStatistics);
             chunkMetadataList.add(currentChunk);
-            chunkCnt++;
             break;
           case MetaMarker.CHUNK_GROUP_HEADER:
-            // this is a chunk group
+            if (lastDeviceId != null) {
+              // schema of last chunk group
+              if (newSchema != null) {
+                for (MeasurementSchema tsSchema : measurementSchemaList) {
+                  newSchema
+                      .putIfAbsent(new Path(lastDeviceId, tsSchema.getMeasurementId()), tsSchema);
+                }
+              }
+              measurementSchemaList = new ArrayList<>();
+              // last chunk group Metadata
+              chunkGroupMetadataList.add(new ChunkGroupMetadata(lastDeviceId, chunkMetadataList));
+            }
             // if there is something wrong with the ChunkGroup Footer, we will drop this ChunkGroup
             // because we can not guarantee the correctness of the deviceId.
+            truncatedSize = this.position() - 1;
+            // this is a chunk group
+            chunkMetadataList = new ArrayList<>();
             ChunkGroupHeader chunkGroupHeader = this.readChunkGroupHeader();
-            deviceID = chunkGroupHeader.getDeviceID();
-            if (newSchema != null) {
-              for (MeasurementSchema tsSchema : measurementSchemaList) {
-                newSchema.putIfAbsent(new Path(deviceID, tsSchema.getMeasurementId()), tsSchema);
+            lastDeviceId = chunkGroupHeader.getDeviceID();
+            break;
+          case MetaMarker.VERSION:
+            if (lastDeviceId != null) {
+              // schema of last chunk group
+              if (newSchema != null) {
+                for (MeasurementSchema tsSchema : measurementSchemaList) {
+                  newSchema
+                      .putIfAbsent(new Path(lastDeviceId, tsSchema.getMeasurementId()), tsSchema);
+                }
               }
+              measurementSchemaList = new ArrayList<>();
+              // last chunk group Metadata
+              chunkGroupMetadataList.add(new ChunkGroupMetadata(lastDeviceId, chunkMetadataList));
+              lastDeviceId = null;
             }
-            chunkGroupMetadataList.add(new ChunkGroupMetadata(deviceID, chunkMetadataList));
-            newChunkGroup = true;
-            truncatedSize = this.position();
 
-            totalChunkNum += chunkCnt;
-            chunkCnt = 0;
-            measurementSchemaList = new ArrayList<>();
-            break;
-          case MetaMarker.VERSION:
+            chunkMetadataList = new ArrayList<>();
             long version = readVersion();
             versionInfo.add(new Pair<>(position(), version));
             truncatedSize = this.position();
@@ -1004,10 +1056,6 @@ public class TsFileSequenceReader implements AutoCloseable {
     return truncatedSize;
   }
 
-  public int getTotalChunkNum() {
-    return totalChunkNum;
-  }
-
   /**
    * get ChunkMetaDatas of given path
    *
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
index 729b8f7..2250307 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
@@ -45,7 +45,7 @@ public class ChunkReader implements IChunkReader {
   private ChunkHeader chunkHeader;
   private ByteBuffer chunkDataBuffer;
   private IUnCompressor unCompressor;
-  private Decoder timeDecoder = Decoder.getDecoderByType(
+  private final Decoder timeDecoder = Decoder.getDecoderByType(
       TSEncoding.valueOf(TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
       TSDataType.INT64);
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
index 5346847..6b890dd 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
@@ -314,10 +314,10 @@ public class TsFileWriter implements AutoCloseable {
   public boolean flushAllChunkGroups() throws IOException {
     if (recordCount > 0) {
       for (Map.Entry<String, IChunkGroupWriter> entry : groupWriters.entrySet()) {
-        long pos = fileWriter.getPos();
         String deviceId = entry.getKey();
         IChunkGroupWriter groupWriter = entry.getValue();
         fileWriter.startChunkGroup(deviceId);
+        long pos = fileWriter.getPos();
         long dataSize = groupWriter.flushToFileWriter(fileWriter);
         if (fileWriter.getPos() - pos != dataSize) {
           throw new IOException(
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
index 107f026..44c2dae 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
@@ -213,9 +213,9 @@ public class ChunkWriterImpl implements IChunkWriter {
       } else if (numOfPages == 1) { // put the firstPageStatistics into pageBuffer
         byte[] b = pageBuffer.toByteArray();
         pageBuffer.reset();
-        pageBuffer.write(b, 0, sizeWithoutStatistic);
+        pageBuffer.write(b, 0, this.sizeWithoutStatistic);
         firstPageStatistics.serialize(pageBuffer);
-        pageBuffer.write(b, sizeWithoutStatistic, b.length - sizeWithoutStatistic);
+        pageBuffer.write(b, this.sizeWithoutStatistic, b.length - this.sizeWithoutStatistic);
         firstPageStatistics = null;
       }
 
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/header/PageHeaderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/header/PageHeaderTest.java
index 3114159..ef87b31 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/header/PageHeaderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/header/PageHeaderTest.java
@@ -70,7 +70,7 @@ public class PageHeaderTest {
     PageHeader header = null;
     try {
       fis = new FileInputStream(new File(PATH));
-      header = PageHeader.deserializeFrom(fis, DATA_TYPE);
+      header = PageHeader.deserializeFrom(fis, DATA_TYPE, true);
       return header;
     } catch (IOException e) {
       e.printStackTrace();
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
index c9931fc..4e8b7e0 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
@@ -75,7 +75,8 @@ public class TsFileSequenceReaderTest {
         case MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER:
           ChunkHeader header = reader.readChunkHeader(marker);
           for (int j = 0; j < header.getNumOfPages(); j++) {
-            PageHeader pageHeader = reader.readPageHeader(header.getDataType());
+            PageHeader pageHeader = reader.readPageHeader(header.getDataType(),
+                header.getChunkType() == MetaMarker.CHUNK_HEADER);
             reader.readPage(pageHeader, header.getCompressionType());
           }
           break;


[iotdb] 01/11: init

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 3d01eec2597b8e313ff418746d787002c194a8ed
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Wed Nov 25 13:48:39 2020 +0800

    init
---
 .../apache/iotdb/tsfile/TsFileSequenceRead.java    |  13 +-
 .../apache/iotdb/hadoop/fileSystem/HDFSInput.java  |  11 +
 .../apache/iotdb/hadoop/fileSystem/HDFSOutput.java |   5 +
 .../iotdb/db/engine/flush/MemTableFlushTask.java   |   2 +-
 .../iotdb/db/query/control/FileReaderManager.java  |   2 +-
 .../apache/iotdb/db/tools/TsFileSketchTool.java    |  16 +-
 .../db/tools/upgrade/TsFileOnlineUpgradeTool.java  |  14 +-
 .../iotdb/tsfile/common/conf/TSFileConfig.java     |   6 +-
 .../org/apache/iotdb/tsfile/file/MetaMarker.java   |   9 +-
 ...ChunkGroupFooter.java => ChunkGroupHeader.java} |  69 +---
 .../iotdb/tsfile/file/header/ChunkHeader.java      | 114 +++---
 .../iotdb/tsfile/file/header/PageHeader.java       |  22 +-
 .../file/metadata/statistics/BinaryStatistics.java |   9 +-
 .../metadata/statistics/BooleanStatistics.java     |  36 +-
 .../metadata/statistics/IntegerStatistics.java     |  44 ++-
 .../file/metadata/statistics/Statistics.java       |  18 +-
 .../iotdb/tsfile/read/TsFileSequenceReader.java    |  42 +--
 .../iotdb/tsfile/read/reader/LocalTsFileInput.java |  10 +
 .../iotdb/tsfile/read/reader/TsFileInput.java      |   5 +
 .../tsfile/utils/ReadWriteForEncodingUtils.java    |  90 ++++-
 .../iotdb/tsfile/utils/ReadWriteIOUtils.java       |  94 ++++-
 .../v1/file/metadata/ChunkGroupMetaDataV1.java     | 118 ------
 .../tsfile/v1/file/metadata/ChunkMetadataV1.java   | 131 -------
 .../v1/file/metadata/TimeseriesMetadataForV1.java  |  42 ---
 .../v1/file/metadata/TsDeviceMetadataIndexV1.java  |  78 ----
 .../v1/file/metadata/TsDeviceMetadataV1.java       |  87 -----
 .../iotdb/tsfile/v1/file/metadata/TsDigestV1.java  |  75 ----
 .../tsfile/v1/file/metadata/TsFileMetadataV1.java  | 106 ------
 .../metadata/statistics/BinaryStatisticsV1.java    |  84 -----
 .../metadata/statistics/BooleanStatisticsV1.java   |  80 ----
 .../metadata/statistics/DoubleStatisticsV1.java    |  79 ----
 .../metadata/statistics/FloatStatisticsV1.java     |  79 ----
 .../metadata/statistics/IntegerStatisticsV1.java   |  79 ----
 .../file/metadata/statistics/LongStatisticsV1.java |  80 ----
 .../v1/file/metadata/statistics/StatisticsV1.java  | 225 ------------
 .../iotdb/tsfile/v1/file/utils/HeaderUtils.java    | 141 -------
 .../tsfile/v1/read/TsFileSequenceReaderForV1.java  | 409 ---------------------
 .../iotdb/tsfile/write/chunk/ChunkWriterImpl.java  |  53 +--
 .../apache/iotdb/tsfile/write/page/PageWriter.java |  31 +-
 .../tsfile/write/writer/LocalTsFileOutput.java     |   5 +
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java  |  19 +-
 .../iotdb/tsfile/write/writer/TsFileOutput.java    |   8 +
 .../tsfile/read/TsFileSequenceReaderTest.java      |  11 +-
 .../iotdb/tsfile/write/TsFileIOWriterTest.java     |  19 +-
 .../write/writer/RestorableTsFileIOWriterTest.java |   8 +-
 45 files changed, 508 insertions(+), 2170 deletions(-)

diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
index da5bae6..818ac3b 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
@@ -26,7 +26,7 @@ import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.encoding.decoder.Decoder;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
+import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
@@ -55,7 +55,7 @@ public class TsFileSequenceRead {
       // first SeriesChunks (headers and data) in one ChunkGroup, then the CHUNK_GROUP_FOOTER
       // Because we do not know how many chunks a ChunkGroup may have, we should read one byte (the marker) ahead and
       // judge accordingly.
-      reader.position((long) TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER
+      reader.position((long) TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
               .getBytes().length);
       System.out.println("[Chunk Group]");
       System.out.println("position: " + reader.position());
@@ -63,9 +63,10 @@ public class TsFileSequenceRead {
       while ((marker = reader.readMarker()) != MetaMarker.SEPARATOR) {
         switch (marker) {
           case MetaMarker.CHUNK_HEADER:
+          case MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER:
             System.out.println("\t[Chunk]");
             System.out.println("\tposition: " + reader.position());
-            ChunkHeader header = reader.readChunkHeader();
+            ChunkHeader header = reader.readChunkHeader(marker);
             System.out.println("\tMeasurement: " + header.getMeasurementID());
             Decoder defaultTimeDecoder = Decoder.getDecoderByType(
                     TSEncoding.valueOf(TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
@@ -92,10 +93,10 @@ public class TsFileSequenceRead {
               }
             }
             break;
-          case MetaMarker.CHUNK_GROUP_FOOTER:
+          case MetaMarker.CHUNK_GROUP_HEADER:
             System.out.println("Chunk Group Footer position: " + reader.position());
-            ChunkGroupFooter chunkGroupFooter = reader.readChunkGroupFooter();
-            System.out.println("device: " + chunkGroupFooter.getDeviceID());
+            ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupFooter();
+            System.out.println("device: " + chunkGroupHeader.getDeviceID());
             break;
           case MetaMarker.VERSION:
             long version = reader.readVersion();
diff --git a/hadoop/src/main/java/org/apache/iotdb/hadoop/fileSystem/HDFSInput.java b/hadoop/src/main/java/org/apache/iotdb/hadoop/fileSystem/HDFSInput.java
index 0b7da82..f453d3a 100644
--- a/hadoop/src/main/java/org/apache/iotdb/hadoop/fileSystem/HDFSInput.java
+++ b/hadoop/src/main/java/org/apache/iotdb/hadoop/fileSystem/HDFSInput.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.iotdb.tsfile.read.reader.TsFileInput;
+import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 public class HDFSInput implements TsFileInput {
 
@@ -126,4 +127,14 @@ public class HDFSInput implements TsFileInput {
   public int readInt() throws IOException {
     throw new UnsupportedOperationException();
   }
+
+  @Override
+  public String readVarIntString(long position) throws IOException {
+    long srcPosition = fsDataInputStream.getPos();
+
+    fsDataInputStream.seek(position);
+    String res = ReadWriteIOUtils.readVarIntString(fsDataInputStream);
+    fsDataInputStream.seek(srcPosition);
+    return res;
+  }
 }
diff --git a/hadoop/src/main/java/org/apache/iotdb/hadoop/fileSystem/HDFSOutput.java b/hadoop/src/main/java/org/apache/iotdb/hadoop/fileSystem/HDFSOutput.java
index aec1a58..2920311 100644
--- a/hadoop/src/main/java/org/apache/iotdb/hadoop/fileSystem/HDFSOutput.java
+++ b/hadoop/src/main/java/org/apache/iotdb/hadoop/fileSystem/HDFSOutput.java
@@ -61,6 +61,11 @@ public class HDFSOutput implements TsFileOutput {
     fsDataOutputStream.write(b);
   }
 
+  @Override
+  public void write(byte b) throws IOException {
+    fsDataOutputStream.write(b);
+  }
+
   public void write(ByteBuffer b) throws IOException {
     throw new UnsupportedOperationException("Unsupported operation.");
   }
diff --git a/server/src/main/java/org/apache/iotdb/db/engine/flush/MemTableFlushTask.java b/server/src/main/java/org/apache/iotdb/db/engine/flush/MemTableFlushTask.java
index e9f4c92..fbc9877 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/flush/MemTableFlushTask.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/flush/MemTableFlushTask.java
@@ -75,7 +75,7 @@ public class MemTableFlushTask {
    * the function for flushing memtable.
    */
   public void syncFlushMemTable()
-      throws ExecutionException, InterruptedException, IOException {
+      throws ExecutionException, InterruptedException {
     logger.info("The memTable size of SG {} is {}, the avg series points num in chunk is {} ",
         storageGroup,
         memTable.memSize(),
diff --git a/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java b/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
index 2b60229..776fcdb 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
@@ -172,7 +172,7 @@ public class FileReaderManager implements IService {
             tsFileReader.close();
             tsFileReader = new TsFileSequenceReaderForV1(filePath);
             break;
-          case TSFileConfig.VERSION_NUMBER:
+          case TSFileConfig.VERSION_NUMBER_V2:
             break;
           default:
             throw new IOException("The version of this TsFile is not corrent. ");
diff --git a/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java b/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
index a558015..ca1b26c 100644
--- a/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
+++ b/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
@@ -28,7 +28,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
+import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkGroupMetadata;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.MetadataIndexEntry;
@@ -105,25 +105,25 @@ public class TsFileSketchTool {
           }
           // chunkGroupFooter begins
           printlnBoth(pw, String.format("%20s", chunkEndPos) + "|\t[Chunk Group Footer]");
-          ChunkGroupFooter chunkGroupFooter = reader.readChunkGroupFooter(chunkEndPos, false);
+          ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupFooter(chunkEndPos, false);
           printlnBoth(pw, String.format("%20s", "") + "|\t\t[marker] 0");
           printlnBoth(pw,
-                  String.format("%20s", "") + "|\t\t[deviceID] " + chunkGroupFooter.getDeviceID());
+                  String.format("%20s", "") + "|\t\t[deviceID] " + chunkGroupHeader.getDeviceID());
           printlnBoth(pw,
-                  String.format("%20s", "") + "|\t\t[dataSize] " + chunkGroupFooter.getDataSize());
-          printlnBoth(pw, String.format("%20s", "") + "|\t\t[num of chunks] " + chunkGroupFooter
+                  String.format("%20s", "") + "|\t\t[dataSize] " + chunkGroupHeader.getDataSize());
+          printlnBoth(pw, String.format("%20s", "") + "|\t\t[num of chunks] " + chunkGroupHeader
                   .getNumberOfChunks());
           printlnBoth(pw, str1.toString() + "\t[Chunk Group] of "
                   + chunkGroupMetadata.getDevice() + " ends");
           // versionInfo begins if there is a versionInfo
-          if (versionMap.containsKey(chunkEndPos + chunkGroupFooter.getSerializedSize())) {
+          if (versionMap.containsKey(chunkEndPos + chunkGroupHeader.getSerializedSize())) {
             printlnBoth(pw,
-                    String.format("%20s", chunkEndPos + chunkGroupFooter.getSerializedSize())
+                    String.format("%20s", chunkEndPos + chunkGroupHeader.getSerializedSize())
                             + "|\t[Version Info]");
             printlnBoth(pw, String.format("%20s", "") + "|\t\t[marker] 3");
             printlnBoth(pw,
                     String.format("%20s", "") + "|\t\t[version] "
-                            + versionMap.get(chunkEndPos + chunkGroupFooter.getSerializedSize()));
+                            + versionMap.get(chunkEndPos + chunkGroupHeader.getSerializedSize()));
           }
         }
 
diff --git a/server/src/main/java/org/apache/iotdb/db/tools/upgrade/TsFileOnlineUpgradeTool.java b/server/src/main/java/org/apache/iotdb/db/tools/upgrade/TsFileOnlineUpgradeTool.java
index 405b4c2..52b7eb3 100644
--- a/server/src/main/java/org/apache/iotdb/db/tools/upgrade/TsFileOnlineUpgradeTool.java
+++ b/server/src/main/java/org/apache/iotdb/db/tools/upgrade/TsFileOnlineUpgradeTool.java
@@ -36,7 +36,7 @@ import org.apache.iotdb.tsfile.exception.write.PageException;
 import org.apache.iotdb.tsfile.exception.write.UnSupportedDataTypeException;
 import org.apache.iotdb.tsfile.exception.write.WriteProcessException;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
+import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
@@ -185,7 +185,7 @@ public class TsFileOnlineUpgradeTool implements AutoCloseable {
    */
   public String readVersionNumber() throws IOException {
     ByteBuffer versionNumberBytes = ByteBuffer
-        .allocate(TSFileConfig.VERSION_NUMBER.getBytes().length);
+        .allocate(TSFileConfig.VERSION_NUMBER_V2.getBytes().length);
     tsFileInput.position(TSFileConfig.MAGIC_STRING.getBytes().length);
     tsFileInput.read(versionNumberBytes);
     versionNumberBytes.flip();
@@ -213,8 +213,8 @@ public class TsFileOnlineUpgradeTool implements AutoCloseable {
    * @return a CHUNK_GROUP_FOOTER
    * @throws IOException io error
    */
-  public ChunkGroupFooter readChunkGroupFooter() throws IOException {
-    return ChunkGroupFooter.deserializeFrom(tsFileInput.wrapAsInputStream(), true);
+  public ChunkGroupHeader readChunkGroupFooter() throws IOException {
+    return ChunkGroupHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), true);
   }
 
   /**
@@ -374,10 +374,10 @@ public class TsFileOnlineUpgradeTool implements AutoCloseable {
             pageDataInChunkGroup.add(dataInChunk);
             pagePartitionInfoInChunkGroup.add(pagePartitionInfo);
             break;
-          case MetaMarker.CHUNK_GROUP_FOOTER:
+          case MetaMarker.CHUNK_GROUP_HEADER:
             // this is the footer of a ChunkGroup.
-            ChunkGroupFooter chunkGroupFooter = this.readChunkGroupFooter();
-            String deviceID = chunkGroupFooter.getDeviceID();
+            ChunkGroupHeader chunkGroupHeader = this.readChunkGroupFooter();
+            String deviceID = chunkGroupHeader.getDeviceID();
             rewrite(oldTsFile, deviceID, measurementSchemaList, pageHeadersInChunkGroup,
                 pageDataInChunkGroup, versionOfChunkGroup, pagePartitionInfoInChunkGroup);
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/common/conf/TSFileConfig.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/common/conf/TSFileConfig.java
index 24e4dde..2193786 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/common/conf/TSFileConfig.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/common/conf/TSFileConfig.java
@@ -61,8 +61,12 @@ public class TSFileConfig implements Serializable {
   public static final Charset STRING_CHARSET = Charset.forName(STRING_ENCODING);
   public static final String CONFIG_FILE_NAME = "iotdb-engine.properties";
   public static final String MAGIC_STRING = "TsFile";
-  public static final String VERSION_NUMBER = "000002";
+  public static final String VERSION_NUMBER_V2 = "000002";
   public static final String VERSION_NUMBER_V1 = "000001";
+  /**
+   * Since file format version 3, the version number is represented by a single byte.
+   */
+  public static final byte VERSION_NUMBER = 0x03;
 
   /**
    * Bloom filter constrain
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/MetaMarker.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/MetaMarker.java
index 758f0d5..c9086b3 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/MetaMarker.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/MetaMarker.java
@@ -26,10 +26,17 @@ import java.io.IOException;
  */
 public class MetaMarker {
 
-  public static final byte CHUNK_GROUP_FOOTER = 0;
+  public static final byte CHUNK_GROUP_HEADER = 0;
+  /**
+   * Means this chunk has more than one page.
+   */
   public static final byte CHUNK_HEADER = 1;
   public static final byte SEPARATOR = 2;
   public static final byte VERSION = 3;
+  /**
+   * Means this chunk has only one page.
+   */
+  public static final byte ONLY_ONE_PAGE_CHUNK_HEADER = 4;
 
   private MetaMarker() {
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupFooter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupHeader.java
similarity index 56%
rename from tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupFooter.java
rename to tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupHeader.java
index 3bc85d0..66afbd8 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupFooter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupHeader.java
@@ -22,40 +22,28 @@ package org.apache.iotdb.tsfile.file.footer;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.io.UnsupportedEncodingException;
-import java.nio.ByteBuffer;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.file.MetaMarker;
 import org.apache.iotdb.tsfile.read.reader.TsFileInput;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
-public class ChunkGroupFooter {
-
-  private static final byte MARKER = MetaMarker.CHUNK_GROUP_FOOTER;
+public class ChunkGroupHeader {
 
-  private String deviceID;
+  private static final byte MARKER = MetaMarker.CHUNK_GROUP_HEADER;
 
-  private long dataSize;
-
-  private int numberOfChunks;
+  private final String deviceID;
 
   // this field does not need to be serialized.
   private int serializedSize;
 
   /**
    * constructor of CHUNK_GROUP_FOOTER.
+   * @param deviceID device ID
    *
-   * @param deviceID       device ID
-   * @param dataSize       data size
-   * @param numberOfChunks number of chunks
    */
-  public ChunkGroupFooter(String deviceID, long dataSize, int numberOfChunks) throws UnsupportedEncodingException {
+  public ChunkGroupHeader(String deviceID) {
     this.deviceID = deviceID;
-    this.dataSize = dataSize;
-    this.numberOfChunks = numberOfChunks;
-    this.serializedSize = Byte.BYTES + Integer.BYTES + deviceID.getBytes(TSFileConfig.STRING_CHARSET).length
-        + Long.BYTES + Integer.BYTES;
+    this.serializedSize = Byte.BYTES + Integer.BYTES + deviceID.getBytes(TSFileConfig.STRING_CHARSET).length;
   }
 
   public static int getSerializedSize(String deviceID) {
@@ -71,7 +59,7 @@ public class ChunkGroupFooter {
    *
    * @param markerRead Whether the marker of the CHUNK_GROUP_FOOTER is read ahead.
    */
-  public static ChunkGroupFooter deserializeFrom(InputStream inputStream, boolean markerRead) throws IOException {
+  public static ChunkGroupHeader deserializeFrom(InputStream inputStream, boolean markerRead) throws IOException {
     if (!markerRead) {
       byte marker = (byte) inputStream.read();
       if (marker != MARKER) {
@@ -79,10 +67,8 @@ public class ChunkGroupFooter {
       }
     }
 
-    String deviceID = ReadWriteIOUtils.readString(inputStream);
-    long dataSize = ReadWriteIOUtils.readLong(inputStream);
-    int numOfChunks = ReadWriteIOUtils.readInt(inputStream);
-    return new ChunkGroupFooter(deviceID, dataSize, numOfChunks);
+    String deviceID = ReadWriteIOUtils.readVarIntString(inputStream);
+    return new ChunkGroupHeader(deviceID);
   }
 
   /**
@@ -90,24 +76,14 @@ public class ChunkGroupFooter {
    *
    * @param markerRead Whether the marker of the CHUNK_GROUP_FOOTER is read ahead.
    */
-  public static ChunkGroupFooter deserializeFrom(TsFileInput input, long offset, boolean markerRead)
+  public static ChunkGroupHeader deserializeFrom(TsFileInput input, long offset, boolean markerRead)
       throws IOException {
     long offsetVar = offset;
     if (!markerRead) {
       offsetVar++;
     }
-    ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES);
-    input.read(buffer, offsetVar);
-    buffer.flip();
-    int size = buffer.getInt();
-    offsetVar += Integer.BYTES;
-    buffer = ByteBuffer.allocate(getSerializedSize(size));
-    ReadWriteIOUtils.readAsPossible(input, offsetVar, buffer);
-    buffer.flip();
-    String deviceID = ReadWriteIOUtils.readStringWithLength(buffer, size);
-    long dataSize = ReadWriteIOUtils.readLong(buffer);
-    int numOfChunks = ReadWriteIOUtils.readInt(buffer);
-    return new ChunkGroupFooter(deviceID, dataSize, numOfChunks);
+    String deviceID = input.readVarIntString(offsetVar);
+    return new ChunkGroupHeader(deviceID);
   }
 
   public int getSerializedSize() {
@@ -118,17 +94,6 @@ public class ChunkGroupFooter {
     return deviceID;
   }
 
-  public long getDataSize() {
-    return dataSize;
-  }
-
-  public void setDataSize(long dataSize) {
-    this.dataSize = dataSize;
-  }
-
-  public int getNumberOfChunks() {
-    return numberOfChunks;
-  }
 
   /**
    * serialize to outputStream.
@@ -140,15 +105,15 @@ public class ChunkGroupFooter {
   public int serializeTo(OutputStream outputStream) throws IOException {
     int length = 0;
     length += ReadWriteIOUtils.write(MARKER, outputStream);
-    length += ReadWriteIOUtils.write(deviceID, outputStream);
-    length += ReadWriteIOUtils.write(dataSize, outputStream);
-    length += ReadWriteIOUtils.write(numberOfChunks, outputStream);
+    length += ReadWriteIOUtils.writeVar(deviceID, outputStream);
     return length;
   }
 
   @Override
   public String toString() {
-    return "CHUNK_GROUP_FOOTER{" + "deviceID='" + deviceID + '\'' + ", dataSize=" + dataSize + ", numberOfChunks="
-        + numberOfChunks + ", serializedSize=" + serializedSize + '}';
+    return "ChunkGroupHeader{" +
+        "deviceID='" + deviceID + '\'' +
+        ", serializedSize=" + serializedSize +
+        '}';
   }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
index 96556b8..c318d44 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
@@ -25,6 +25,7 @@ import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
 import org.apache.iotdb.tsfile.read.reader.TsFileInput;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 import java.io.IOException;
 import java.io.InputStream;
@@ -33,101 +34,118 @@ import java.nio.ByteBuffer;
 
 public class ChunkHeader {
 
+
   private String measurementID;
   private int dataSize;
   private TSDataType dataType;
   private CompressionType compressionType;
   private TSEncoding encodingType;
-  private int numOfPages;
 
-  // this field does not need to be serialized.
+  // the following fields do not need to be serialized.
+  /**
+   * 1 means this chunk has more than one page, so each page has its own page statistics.
+   * 4 means this chunk has only one page, and this page has no page statistics.
+   */
+  private byte chunkType;
+  private int numOfPages;
   private int serializedSize;
 
   public ChunkHeader(String measurementID, int dataSize, TSDataType dataType,
-      CompressionType compressionType,
-      TSEncoding encoding, int numOfPages) {
-    this(measurementID, dataSize, getSerializedSize(measurementID), dataType, compressionType,
-        encoding, numOfPages);
+      CompressionType compressionType, TSEncoding encoding, int numOfPages) {
+    this(numOfPages <= 1 ? MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER : MetaMarker.CHUNK_HEADER,
+        measurementID, dataSize, getSerializedSize(measurementID, dataSize), dataType,
+        compressionType,
+        encoding);
+    this.numOfPages = numOfPages;
   }
 
-  private ChunkHeader(String measurementID, int dataSize, int headerSize, TSDataType dataType,
-      CompressionType compressionType, TSEncoding encoding, int numOfPages) {
+  public ChunkHeader(byte chunkType, String measurementID, int dataSize, TSDataType dataType,
+      CompressionType compressionType, TSEncoding encoding) {
+    this(chunkType, measurementID, dataSize, getSerializedSize(measurementID, dataSize), dataType,
+        compressionType, encoding);
+  }
+
+  private ChunkHeader(byte chunkType, String measurementID, int dataSize, int headerSize,
+      TSDataType dataType, CompressionType compressionType, TSEncoding encoding) {
+    this.chunkType = chunkType;
     this.measurementID = measurementID;
     this.dataSize = dataSize;
     this.dataType = dataType;
     this.compressionType = compressionType;
-    this.numOfPages = numOfPages;
     this.encodingType = encoding;
     this.serializedSize = headerSize;
   }
 
+  /**
+   * the exact serialized size of chunk header
+   */
+  public static int getSerializedSize(String measurementID, int dataSize) {
+    int measurementIdLength = measurementID.getBytes(TSFileConfig.STRING_CHARSET).length;
+    return ReadWriteForEncodingUtils.varIntSize(measurementIdLength) // measurementID length
+        + measurementIdLength // measurementID
+        + ReadWriteForEncodingUtils.varIntSize(dataSize) // dataSize
+        + TSDataType.getSerializedSize() // dataType
+        + CompressionType.getSerializedSize() // compressionType
+        + TSEncoding.getSerializedSize(); // encodingType
+  }
+
+  /**
+   * The estimated serialized size of chunk header. Only used when we don't know the actual dataSize
+   * attribute
+   */
   public static int getSerializedSize(String measurementID) {
-    return Byte.BYTES // marker
-        + Integer.BYTES // measurementID length
-        + measurementID.getBytes(TSFileConfig.STRING_CHARSET).length // measurementID
-        + Integer.BYTES // dataSize
+
+    int measurementIdLength = measurementID.getBytes(TSFileConfig.STRING_CHARSET).length;
+    return ReadWriteForEncodingUtils.varIntSize(measurementIdLength) // measurementID length
+        + measurementIdLength // measurementID
+        + Integer.BYTES + 1 // varInt dataSize
         + TSDataType.getSerializedSize() // dataType
         + CompressionType.getSerializedSize() // compressionType
-        + TSEncoding.getSerializedSize() // encodingType
-        + Integer.BYTES; // numOfPages
+        + TSEncoding.getSerializedSize(); // encodingType
   }
 
   /**
    * deserialize from inputStream.
-   *
-   * @param markerRead Whether the marker of the CHUNK_HEADER has been read
    */
-  public static ChunkHeader deserializeFrom(InputStream inputStream, boolean markerRead)
+  public static ChunkHeader deserializeFrom(InputStream inputStream, byte chunkType)
       throws IOException {
-    if (!markerRead) {
-      byte marker = (byte) inputStream.read();
-      if (marker != MetaMarker.CHUNK_HEADER) {
-        MetaMarker.handleUnexpectedMarker(marker);
-      }
-    }
-
-    String measurementID = ReadWriteIOUtils.readString(inputStream);
-    int dataSize = ReadWriteIOUtils.readInt(inputStream);
-    TSDataType dataType = TSDataType.deserialize(ReadWriteIOUtils.readShort(inputStream));
-    int numOfPages = ReadWriteIOUtils.readInt(inputStream);
+    // read measurementID
+    String measurementID = ReadWriteIOUtils.readVarIntString(inputStream);
+    int dataSize = ReadWriteForEncodingUtils.readVarInt(inputStream);
+    TSDataType dataType = ReadWriteIOUtils.readDataType(inputStream);
     CompressionType type = ReadWriteIOUtils.readCompressionType(inputStream);
     TSEncoding encoding = ReadWriteIOUtils.readEncoding(inputStream);
-    return new ChunkHeader(measurementID, dataSize, dataType, type, encoding, numOfPages);
+    return new ChunkHeader(chunkType, measurementID, dataSize, dataType, type, encoding);
   }
 
   /**
    * deserialize from TsFileInput.
    *
-   * @param input TsFileInput
-   * @param offset offset
-   * @param chunkHeaderSize the size of chunk's header
-   * @param markerRead read marker (boolean type)
+   * @param input           TsFileInput
+   * @param offset          offset
+   * @param chunkHeaderSize the estimated size of chunk's header
    * @return CHUNK_HEADER object
    * @throws IOException IOException
    */
-  public static ChunkHeader deserializeFrom(TsFileInput input, long offset, int chunkHeaderSize,
-      boolean markerRead)
-      throws IOException {
-    long offsetVar = offset;
-    if (!markerRead) {
-      offsetVar++;
-    }
+  public static ChunkHeader deserializeFrom(TsFileInput input, long offset, int chunkHeaderSize) throws IOException {
 
     // read chunk header from input to buffer
     ByteBuffer buffer = ByteBuffer.allocate(chunkHeaderSize);
-    input.read(buffer, offsetVar);
+    input.read(buffer, offset);
     buffer.flip();
 
+    byte chunkType = buffer.get();
     // read measurementID
-    int size = buffer.getInt();
+    int size = ReadWriteForEncodingUtils.readVarInt(buffer);
     String measurementID = ReadWriteIOUtils.readStringWithLength(buffer, size);
-    int dataSize = ReadWriteIOUtils.readInt(buffer);
-    TSDataType dataType = TSDataType.deserialize(ReadWriteIOUtils.readShort(buffer));
-    int numOfPages = ReadWriteIOUtils.readInt(buffer);
+    int dataSize = ReadWriteForEncodingUtils.readVarInt(buffer);
+    TSDataType dataType = ReadWriteIOUtils.readDataType(buffer);
     CompressionType type = ReadWriteIOUtils.readCompressionType(buffer);
     TSEncoding encoding = ReadWriteIOUtils.readEncoding(buffer);
-    return new ChunkHeader(measurementID, dataSize, chunkHeaderSize, dataType, type, encoding,
-        numOfPages);
+    chunkHeaderSize =
+        chunkHeaderSize - Integer.BYTES + ReadWriteForEncodingUtils.varIntSize(dataSize);
+    return new ChunkHeader(chunkType, measurementID, dataSize, chunkHeaderSize, dataType, type,
+        encoding);
   }
 
   public int getSerializedSize() {
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
index fa24f93..0b3e4cd 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
@@ -23,10 +23,9 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
-
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 
 public class PageHeader {
 
@@ -41,21 +40,24 @@ public class PageHeader {
     this.statistics = statistics;
   }
 
-  public static int calculatePageHeaderSizeWithoutStatistics() {
-    return 2 * Integer.BYTES; // uncompressedSize, compressedSize
+  /**
+   * max page header size without statistics
+   */
+  public static int estimateMaxPageHeaderSizeWithoutStatistics() {
+    return 2 * (Integer.BYTES + 1); // uncompressedSize, compressedSize
   }
 
   public static PageHeader deserializeFrom(InputStream inputStream, TSDataType dataType)
       throws IOException {
-    int uncompressedSize = ReadWriteIOUtils.readInt(inputStream);
-    int compressedSize = ReadWriteIOUtils.readInt(inputStream);
+    int uncompressedSize = ReadWriteForEncodingUtils.readUnsignedVarInt(inputStream);
+    int compressedSize = ReadWriteForEncodingUtils.readUnsignedVarInt(inputStream);
     Statistics statistics = Statistics.deserialize(inputStream, dataType);
     return new PageHeader(uncompressedSize, compressedSize, statistics);
   }
 
   public static PageHeader deserializeFrom(ByteBuffer buffer, TSDataType dataType) {
-    int uncompressedSize = ReadWriteIOUtils.readInt(buffer);
-    int compressedSize = ReadWriteIOUtils.readInt(buffer);
+    int uncompressedSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
+    int compressedSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
     Statistics statistics = Statistics.deserialize(buffer, dataType);
     return new PageHeader(uncompressedSize, compressedSize, statistics);
   }
@@ -93,8 +95,8 @@ public class PageHeader {
   }
 
   public void serializeTo(OutputStream outputStream) throws IOException {
-    ReadWriteIOUtils.write(uncompressedSize, outputStream);
-    ReadWriteIOUtils.write(compressedSize, outputStream);
+    ReadWriteForEncodingUtils.writeUnsignedVarInt(uncompressedSize, outputStream);
+    ReadWriteForEncodingUtils.writeUnsignedVarInt(compressedSize, outputStream);
     statistics.serialize(outputStream);
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BinaryStatistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BinaryStatistics.java
index 607589b..6934d17 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BinaryStatistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BinaryStatistics.java
@@ -101,8 +101,13 @@ public class BinaryStatistics extends Statistics<Binary> {
   }
 
   @Override
-  public double getSumValue() {
-    throw new StatisticsClassException(String.format(BINARY_STATS_UNSUPPORTED_MSG, "sum"));
+  public double getSumDoubleValue() {
+    throw new StatisticsClassException(String.format(BINARY_STATS_UNSUPPORTED_MSG, "double sum"));
+  }
+
+  @Override
+  public long getSumLongValue() {
+    throw new StatisticsClassException(String.format(BINARY_STATS_UNSUPPORTED_MSG, "long sum"));
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
index f7c2d1c..e24ef46 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
@@ -32,8 +32,9 @@ public class BooleanStatistics extends Statistics<Boolean> {
 
   private boolean firstValue;
   private boolean lastValue;
+  private long sumValue;
 
-  static final int BOOLEAN_STATISTICS_FIXED_RAM_SIZE = 48;
+  static final int BOOLEAN_STATISTICS_FIXED_RAM_SIZE = 56;
 
 
   @Override
@@ -43,7 +44,7 @@ public class BooleanStatistics extends Statistics<Boolean> {
 
   @Override
   public int getStatsSize() {
-    return 2;
+    return 10;
   }
 
   /**
@@ -52,16 +53,18 @@ public class BooleanStatistics extends Statistics<Boolean> {
    * @param firstValue first boolean value
    * @param lastValue  last boolean value
    */
-  public void initializeStats(boolean firstValue, boolean lastValue) {
+  public void initializeStats(boolean firstValue, boolean lastValue, long sum) {
     this.firstValue = firstValue;
     this.lastValue = lastValue;
+    this.sumValue = sum;
   }
 
-  private void updateStats(boolean firstValue, boolean lastValue) {
+  private void updateStats(boolean firstValue, boolean lastValue, long sum) {
     this.lastValue = lastValue;
+    this.sumValue += sum;
   }
 
-  private void updateStats(boolean firstValue, boolean lastValue, long startTime, long endTime) {
+  private void updateStats(boolean firstValue, boolean lastValue, long startTime, long endTime, long sum) {
     // only if endTime greater or equals to the current endTime need we update the last value
     // only if startTime less or equals to the current startTime need we update the first value
     // otherwise, just ignore
@@ -71,15 +74,16 @@ public class BooleanStatistics extends Statistics<Boolean> {
     if (endTime >= this.getEndTime()) {
       this.lastValue = lastValue;
     }
+    this.sumValue += sum;
   }
 
   @Override
   void updateStats(boolean value) {
     if (isEmpty) {
-      initializeStats(value, value);
+      initializeStats(value, value, value ? 1 : 0);
       isEmpty = false;
     } else {
-      updateStats(value, value);
+      updateStats(value, value, value ? 1 : 0);
     }
   }
 
@@ -120,8 +124,13 @@ public class BooleanStatistics extends Statistics<Boolean> {
   }
 
   @Override
-  public double getSumValue() {
-    throw new StatisticsClassException("Boolean statistics does not support: sum");
+  public double getSumDoubleValue() {
+    throw new StatisticsClassException("Boolean statistics does not support: double sum");
+  }
+
+  @Override
+  public long getSumLongValue() {
+    return sumValue;
   }
 
   @Override
@@ -146,17 +155,18 @@ public class BooleanStatistics extends Statistics<Boolean> {
 
   @Override
   public ByteBuffer getSumValueBuffer() {
-    throw new StatisticsClassException("Boolean statistics do not support: sum");
+    return ReadWriteIOUtils.getByteBuffer(sumValue);
   }
 
   @Override
   protected void mergeStatisticsValue(Statistics stats) {
     BooleanStatistics boolStats = (BooleanStatistics) stats;
     if (isEmpty) {
-      initializeStats(boolStats.getFirstValue(), boolStats.getLastValue());
+      initializeStats(boolStats.getFirstValue(), boolStats.getLastValue(), boolStats.sumValue);
       isEmpty = false;
     } else {
-      updateStats(boolStats.getFirstValue(), boolStats.getLastValue(), stats.getStartTime(), stats.getEndTime());
+      updateStats(boolStats.getFirstValue(), boolStats.getLastValue(), stats.getStartTime(),
+          stats.getEndTime(), boolStats.sumValue);
     }
   }
 
@@ -182,7 +192,7 @@ public class BooleanStatistics extends Statistics<Boolean> {
 
   @Override
   public byte[] getSumValueBytes() {
-    throw new StatisticsClassException("Boolean statistics does not support: sum");
+    return BytesUtils.longToBytes(sumValue);
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/IntegerStatistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/IntegerStatistics.java
index 4ca9e81..dec73a1 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/IntegerStatistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/IntegerStatistics.java
@@ -18,14 +18,14 @@
  */
 package org.apache.iotdb.tsfile.file.metadata.statistics;
 
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.utils.BytesUtils;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import org.apache.iotdb.tsfile.exception.filter.StatisticsClassException;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.utils.BytesUtils;
+import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 /**
  * Statistics for int type.
@@ -36,7 +36,7 @@ public class IntegerStatistics extends Statistics<Integer> {
   private int maxValue;
   private int firstValue;
   private int lastValue;
-  private double sumValue;
+  private long sumValue;
 
   static final int INTEGER_STATISTICS_FIXED_RAM_SIZE = 64;
 
@@ -51,7 +51,7 @@ public class IntegerStatistics extends Statistics<Integer> {
     return 24;
   }
 
-  public void initializeStats(int min, int max, int first, int last, double sum) {
+  public void initializeStats(int min, int max, int first, int last, long sum) {
     this.minValue = min;
     this.maxValue = max;
     this.firstValue = first;
@@ -59,7 +59,7 @@ public class IntegerStatistics extends Statistics<Integer> {
     this.sumValue = sum;
   }
 
-  private void updateStats(int minValue, int maxValue, int lastValue, double sumValue) {
+  private void updateStats(int minValue, int maxValue, int lastValue, long sumValue) {
     if (minValue < this.minValue) {
       this.minValue = minValue;
     }
@@ -70,7 +70,8 @@ public class IntegerStatistics extends Statistics<Integer> {
     this.lastValue = lastValue;
   }
 
-  private void updateStats(int minValue, int maxValue, int firstValue, int lastValue, double sumValue, long startTime, long endTime) {
+  private void updateStats(int minValue, int maxValue, int firstValue, int lastValue,
+      long sumValue, long startTime, long endTime) {
     if (minValue < this.minValue) {
       this.minValue = minValue;
     }
@@ -102,7 +103,6 @@ public class IntegerStatistics extends Statistics<Integer> {
       isEmpty = false;
     } else {
       updateStats(value, value, value, value);
-      isEmpty = false;
     }
   }
 
@@ -139,7 +139,12 @@ public class IntegerStatistics extends Statistics<Integer> {
   }
 
   @Override
-  public double getSumValue() {
+  public double getSumDoubleValue() {
+    throw new StatisticsClassException("Integer statistics does not support: double sum");
+  }
+
+  @Override
+  public long getSumLongValue() {
     return sumValue;
   }
 
@@ -147,14 +152,13 @@ public class IntegerStatistics extends Statistics<Integer> {
   protected void mergeStatisticsValue(Statistics stats) {
     IntegerStatistics intStats = (IntegerStatistics) stats;
     if (isEmpty) {
-      initializeStats(intStats.getMinValue(), intStats.getMaxValue(), intStats.getFirstValue(), intStats.getLastValue(),
-          intStats.getSumValue());
+      initializeStats(intStats.getMinValue(), intStats.getMaxValue(), intStats.getFirstValue(),
+          intStats.getLastValue(), intStats.sumValue);
       isEmpty = false;
     } else {
-      updateStats(intStats.getMinValue(), intStats.getMaxValue(), intStats.getFirstValue(), intStats.getLastValue(),
-          intStats.getSumValue(), stats.getStartTime(), stats.getEndTime());
+      updateStats(intStats.getMinValue(), intStats.getMaxValue(), intStats.getFirstValue(),
+          intStats.getLastValue(), intStats.sumValue, stats.getStartTime(), stats.getEndTime());
     }
-
   }
 
   @Override
@@ -204,7 +208,7 @@ public class IntegerStatistics extends Statistics<Integer> {
 
   @Override
   public byte[] getSumValueBytes() {
-    return BytesUtils.doubleToBytes(sumValue);
+    return BytesUtils.longToBytes(sumValue);
   }
 
   @Override
@@ -224,7 +228,7 @@ public class IntegerStatistics extends Statistics<Integer> {
     this.maxValue = ReadWriteIOUtils.readInt(inputStream);
     this.firstValue = ReadWriteIOUtils.readInt(inputStream);
     this.lastValue = ReadWriteIOUtils.readInt(inputStream);
-    this.sumValue = ReadWriteIOUtils.readDouble(inputStream);
+    this.sumValue = ReadWriteIOUtils.readLong(inputStream);
   }
 
   @Override
@@ -233,12 +237,12 @@ public class IntegerStatistics extends Statistics<Integer> {
     this.maxValue = ReadWriteIOUtils.readInt(byteBuffer);
     this.firstValue = ReadWriteIOUtils.readInt(byteBuffer);
     this.lastValue = ReadWriteIOUtils.readInt(byteBuffer);
-    this.sumValue = ReadWriteIOUtils.readDouble(byteBuffer);
+    this.sumValue = ReadWriteIOUtils.readLong(byteBuffer);
   }
 
   @Override
   public String toString() {
-    return super.toString() + " [minValue:" + minValue + ",maxValue:" + maxValue + ",firstValue:" + firstValue +
-        ",lastValue:" + lastValue + ",sumValue:" + sumValue + "]";
+    return super.toString() + " [minValue:" + minValue + ",maxValue:" + maxValue + ",firstValue:"
+        + firstValue + ",lastValue:" + lastValue + ",sumValue:" + sumValue + "]";
   }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/Statistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/Statistics.java
index 7a31494..acbc339 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/Statistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/Statistics.java
@@ -28,6 +28,7 @@ import org.apache.iotdb.tsfile.exception.filter.StatisticsClassException;
 import org.apache.iotdb.tsfile.exception.write.UnknownColumnTypeException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.utils.Binary;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,7 +51,7 @@ public abstract class Statistics<T> {
   /**
    * number of time-value points
    */
-  private long count = 0;
+  private int count = 0;
 
   private long startTime = Long.MAX_VALUE;
   private long endTime = Long.MIN_VALUE;
@@ -102,7 +103,8 @@ public abstract class Statistics<T> {
   public abstract TSDataType getType();
 
   public int getSerializedSize() {
-    return 24 // count, startTime, endTime
+    return ReadWriteForEncodingUtils.uVarIntSize(count) // count
+        + 16 // startTime, endTime
         + getStatsSize();
   }
 
@@ -110,7 +112,7 @@ public abstract class Statistics<T> {
 
   public int serialize(OutputStream outputStream) throws IOException {
     int byteLen = 0;
-    byteLen += ReadWriteIOUtils.write(count, outputStream);
+    byteLen += ReadWriteForEncodingUtils.writeUnsignedVarInt(count, outputStream);
     byteLen += ReadWriteIOUtils.write(startTime, outputStream);
     byteLen += ReadWriteIOUtils.write(endTime, outputStream);
     // value statistics of different data type
@@ -137,7 +139,9 @@ public abstract class Statistics<T> {
 
   public abstract T getLastValue();
 
-  public abstract double getSumValue();
+  public abstract double getSumDoubleValue();
+
+  public abstract long getSumLongValue();
 
   public abstract byte[] getMinValueBytes();
 
@@ -389,7 +393,7 @@ public abstract class Statistics<T> {
   public static Statistics deserialize(InputStream inputStream, TSDataType dataType)
       throws IOException {
     Statistics statistics = getStatsByType(dataType);
-    statistics.setCount(ReadWriteIOUtils.readLong(inputStream));
+    statistics.setCount(ReadWriteForEncodingUtils.readUnsignedVarInt(inputStream));
     statistics.setStartTime(ReadWriteIOUtils.readLong(inputStream));
     statistics.setEndTime(ReadWriteIOUtils.readLong(inputStream));
     statistics.deserialize(inputStream);
@@ -399,7 +403,7 @@ public abstract class Statistics<T> {
 
   public static Statistics deserialize(ByteBuffer buffer, TSDataType dataType) {
     Statistics statistics = getStatsByType(dataType);
-    statistics.setCount(ReadWriteIOUtils.readLong(buffer));
+    statistics.setCount(ReadWriteForEncodingUtils.readUnsignedVarInt(buffer));
     statistics.setStartTime(ReadWriteIOUtils.readLong(buffer));
     statistics.setEndTime(ReadWriteIOUtils.readLong(buffer));
     statistics.deserialize(buffer);
@@ -427,7 +431,7 @@ public abstract class Statistics<T> {
     this.endTime = endTime;
   }
 
-  public void setCount(long count) {
+  public void setCount(int count) {
     this.count = count;
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index 16f932e..9b9b3f4 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -39,7 +39,7 @@ import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.compress.IUnCompressor;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
+import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkGroupMetadata;
@@ -207,7 +207,7 @@ public class TsFileSequenceReader implements AutoCloseable {
    */
   public boolean isComplete() throws IOException {
     return tsFileInput.size() >= TSFileConfig.MAGIC_STRING.getBytes().length * 2
-        + TSFileConfig.VERSION_NUMBER.getBytes().length
+        + TSFileConfig.VERSION_NUMBER_V2.getBytes().length
         && (readTailMagic().equals(readHeadMagic()) || readTailMagic()
         .equals(TSFileConfig.VERSION_NUMBER_V1));
   }
@@ -228,7 +228,7 @@ public class TsFileSequenceReader implements AutoCloseable {
    */
   public String readVersionNumber() throws IOException {
     ByteBuffer versionNumberBytes = ByteBuffer
-        .allocate(TSFileConfig.VERSION_NUMBER.getBytes().length);
+        .allocate(TSFileConfig.VERSION_NUMBER_V2.getBytes().length);
     tsFileInput.read(versionNumberBytes, TSFileConfig.MAGIC_STRING.getBytes().length);
     versionNumberBytes.flip();
     return new String(versionNumberBytes.array());
@@ -654,8 +654,8 @@ public class TsFileSequenceReader implements AutoCloseable {
    * @return a CHUNK_GROUP_FOOTER
    * @throws IOException io error
    */
-  public ChunkGroupFooter readChunkGroupFooter() throws IOException {
-    return ChunkGroupFooter.deserializeFrom(tsFileInput.wrapAsInputStream(), true);
+  public ChunkGroupHeader readChunkGroupFooter() throws IOException {
+    return ChunkGroupHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), true);
   }
 
   /**
@@ -666,9 +666,9 @@ public class TsFileSequenceReader implements AutoCloseable {
    * @return a CHUNK_GROUP_FOOTER
    * @throws IOException io error
    */
-  public ChunkGroupFooter readChunkGroupFooter(long position, boolean markerRead)
+  public ChunkGroupHeader readChunkGroupFooter(long position, boolean markerRead)
       throws IOException {
-    return ChunkGroupFooter.deserializeFrom(tsFileInput, position, markerRead);
+    return ChunkGroupHeader.deserializeFrom(tsFileInput, position, markerRead);
   }
 
   public long readVersion() throws IOException {
@@ -687,20 +687,17 @@ public class TsFileSequenceReader implements AutoCloseable {
    * @return a CHUNK_HEADER
    * @throws IOException io error
    */
-  public ChunkHeader readChunkHeader() throws IOException {
-    return ChunkHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), true);
+  public ChunkHeader readChunkHeader(byte chunkType) throws IOException {
+    return ChunkHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), chunkType);
   }
 
   /**
    * read the chunk's header.
-   *
-   * @param position        the file offset of this chunk's header
+   * @param position        the file offset of this chunk's header
    * @param chunkHeaderSize the size of chunk's header
-   * @param markerRead      true if the offset does not contains the marker , otherwise false
    */
-  private ChunkHeader readChunkHeader(long position, int chunkHeaderSize, boolean markerRead)
-      throws IOException {
-    return ChunkHeader.deserializeFrom(tsFileInput, position, chunkHeaderSize, markerRead);
+  private ChunkHeader readChunkHeader(long position, int chunkHeaderSize) throws IOException {
+    return ChunkHeader.deserializeFrom(tsFileInput, position, chunkHeaderSize);
   }
 
   /**
@@ -722,7 +719,7 @@ public class TsFileSequenceReader implements AutoCloseable {
    */
   public Chunk readMemChunk(ChunkMetadata metaData) throws IOException {
     int chunkHeadSize = ChunkHeader.getSerializedSize(metaData.getMeasurementUid());
-    ChunkHeader header = readChunkHeader(metaData.getOffsetOfChunkHeader(), chunkHeadSize, false);
+    ChunkHeader header = readChunkHeader(metaData.getOffsetOfChunkHeader(), chunkHeadSize);
     ByteBuffer buffer = readChunk(metaData.getOffsetOfChunkHeader() + header.getSerializedSize(),
         header.getDataSize());
     return new Chunk(header, buffer, metaData.getDeleteIntervalList());
@@ -903,12 +900,12 @@ public class TsFileSequenceReader implements AutoCloseable {
     List<ChunkMetadata> chunkMetadataList = null;
     String deviceID;
 
-    int headerLength = TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER
+    int headerLength = TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
         .getBytes().length;
     if (fileSize < headerLength) {
       return TsFileCheckStatus.INCOMPATIBLE_FILE;
     }
-    if (!TSFileConfig.MAGIC_STRING.equals(readHeadMagic()) || !TSFileConfig.VERSION_NUMBER
+    if (!TSFileConfig.MAGIC_STRING.equals(readHeadMagic()) || !TSFileConfig.VERSION_NUMBER_V2
         .equals(readVersionNumber())) {
       return TsFileCheckStatus.INCOMPATIBLE_FILE;
     }
@@ -932,6 +929,7 @@ public class TsFileSequenceReader implements AutoCloseable {
       while ((marker = this.readMarker()) != MetaMarker.SEPARATOR) {
         switch (marker) {
           case MetaMarker.CHUNK_HEADER:
+          case MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER:
             // this is the first chunk of a new ChunkGroup.
             if (newChunkGroup) {
               newChunkGroup = false;
@@ -941,7 +939,7 @@ public class TsFileSequenceReader implements AutoCloseable {
             // if there is something wrong with a chunk, we will drop the whole ChunkGroup
             // as different chunks may be created by the same insertions(sqls), and partial
             // insertion is not tolerable
-            ChunkHeader chunkHeader = this.readChunkHeader();
+            ChunkHeader chunkHeader = this.readChunkHeader(marker);
             measurementID = chunkHeader.getMeasurementID();
             MeasurementSchema measurementSchema = new MeasurementSchema(measurementID,
                 chunkHeader.getDataType(),
@@ -960,12 +958,12 @@ public class TsFileSequenceReader implements AutoCloseable {
             chunkMetadataList.add(currentChunk);
             chunkCnt++;
             break;
-          case MetaMarker.CHUNK_GROUP_FOOTER:
+          case MetaMarker.CHUNK_GROUP_HEADER:
             // this is a chunk group
             // if there is something wrong with the ChunkGroup Footer, we will drop this ChunkGroup
             // because we can not guarantee the correctness of the deviceId.
-            ChunkGroupFooter chunkGroupFooter = this.readChunkGroupFooter();
-            deviceID = chunkGroupFooter.getDeviceID();
+            ChunkGroupHeader chunkGroupHeader = this.readChunkGroupFooter();
+            deviceID = chunkGroupHeader.getDeviceID();
             if (newSchema != null) {
               for (MeasurementSchema tsSchema : measurementSchemaList) {
                 newSchema.putIfAbsent(new Path(deviceID, tsSchema.getMeasurementId()), tsSchema);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/LocalTsFileInput.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/LocalTsFileInput.java
index de314b1..ae05d7d 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/LocalTsFileInput.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/LocalTsFileInput.java
@@ -25,6 +25,7 @@ import java.nio.channels.Channels;
 import java.nio.channels.FileChannel;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
+import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -125,4 +126,13 @@ public class LocalTsFileInput implements TsFileInput {
   public int readInt() {
     throw new UnsupportedOperationException();
   }
+
+  @Override
+  public String readVarIntString(long offset) throws IOException {
+    long position = channel.position();
+    channel.position(offset);
+    String res = ReadWriteIOUtils.readVarIntString(wrapAsInputStream());
+    channel.position(position);
+    return res;
+  }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/TsFileInput.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/TsFileInput.java
index b948e0c..e215d3c 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/TsFileInput.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/TsFileInput.java
@@ -143,4 +143,9 @@ public interface TsFileInput {
    * read 4 bytes from the Input and convert it to a integer.
    */
   int readInt() throws IOException;
+
+  /**
+   * read a varint-length-prefixed string from the Input at the given offset
+   */
+  String readVarIntString(long offset) throws IOException;
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteForEncodingUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteForEncodingUtils.java
index 0b1f8b0..578ab4c 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteForEncodingUtils.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteForEncodingUtils.java
@@ -106,6 +106,15 @@ public class ReadWriteForEncodingUtils {
     return value | (b << i);
   }
 
+  public static int readVarInt(InputStream in) throws IOException {
+    int value = readUnsignedVarInt(in);
+    int x = value >>> 1;
+    if ((value & 1) != 0) {
+      x = ~x;
+    }
+    return x;
+  }
+
   /**
    * read an unsigned var int in stream and transform it to int format.
    *
@@ -123,6 +132,15 @@ public class ReadWriteForEncodingUtils {
     return value | (b << i);
   }
 
+  public static int readVarInt(ByteBuffer buffer) {
+    int value = readUnsignedVarInt(buffer);
+    int x = value >>> 1;
+    if ((value & 1) != 0) {
+      x = ~x;
+    }
+    return x;
+  }
+
   /**
    * write a value to stream using unsigned var int format. for example, int
    * 123456789 has its binary format 00000111-01011011-11001101-00010101 (if we
@@ -134,12 +152,42 @@ public class ReadWriteForEncodingUtils {
    * @param value value to write into stream
    * @param out   output stream
    */
-  public static void writeUnsignedVarInt(int value, ByteArrayOutputStream out) {
+  public static int writeUnsignedVarInt(int value, ByteArrayOutputStream out) {
+    int position = 1;
+    while ((value & 0xFFFFFF80) != 0L) {
+      out.write((value & 0x7F) | 0x80);
+      value >>>= 7;
+      position++;
+    }
+    out.write(value & 0x7F);
+    return position;
+  }
+
+  public static int writeVarInt(int value, ByteArrayOutputStream out) {
+    int uValue = value << 1;
+    if (value < 0) {
+      uValue = ~uValue;
+    }
+    return writeUnsignedVarInt(uValue, out);
+  }
+
+  public static int writeUnsignedVarInt(int value, OutputStream out) throws IOException {
+    int position = 1;
     while ((value & 0xFFFFFF80) != 0L) {
       out.write((value & 0x7F) | 0x80);
       value >>>= 7;
+      position++;
     }
     out.write(value & 0x7F);
+    return position;
+  }
+
+  public static int writeVarInt(int value, OutputStream out) throws IOException {
+    int uValue = value << 1;
+    if (value < 0) {
+      uValue = ~uValue;
+    }
+    return writeUnsignedVarInt(uValue, out);
   }
 
   /**
@@ -167,6 +215,46 @@ public class ReadWriteForEncodingUtils {
     return position;
   }
 
+  public static int writeVarInt(int value, ByteBuffer buffer) {
+    int uValue = value << 1;
+    if (value < 0) {
+      uValue = ~uValue;
+    }
+    return writeUnsignedVarInt(uValue, buffer);
+  }
+
+  /**
+   * Returns the encoding size in bytes of its input value.
+   * @param value the integer to be measured
+   * @return the encoding size in bytes of its input value
+   */
+  public static int varIntSize(int value) {
+    int uValue = value << 1;
+    if (value < 0) {
+      uValue = ~uValue;
+    }
+    int position = 1;
+    while ((uValue & 0xFFFFFF80) != 0L) {
+      uValue >>>= 7;
+      position++;
+    }
+    return position;
+  }
+
+  /**
+   * Returns the encoding size in bytes of its input value.
+   * @param value the unsigned integer to be measured
+   * @return the encoding size in bytes of its input value
+   */
+  public static int uVarIntSize(int value) {
+    int position = 1;
+    while ((value & 0xFFFFFF80) != 0L) {
+      value >>>= 7;
+      position++;
+    }
+    return position;
+  }
+
   /**
    * write integer value using special bit to output stream.
    *
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
index 04ab1ba..45f00ae 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
@@ -353,6 +353,25 @@ public class ReadWriteIOUtils {
   }
 
   /**
+   * write string to outputStream.
+   *
+   * @return the length of string represented by byte[].
+   */
+  public static int writeVar(String s, OutputStream outputStream) throws IOException {
+    int len = 0;
+    if (s == null) {
+      len += ReadWriteForEncodingUtils.writeVarInt(-1, outputStream);
+      return len;
+    }
+
+    byte[] bytes = s.getBytes();
+    len += write(bytes.length, outputStream);
+    outputStream.write(bytes);
+    len += bytes.length;
+    return len;
+  }
+
+  /**
    * write string to byteBuffer.
    *
    * @return the length of string represented by byte[].
@@ -369,6 +388,18 @@ public class ReadWriteIOUtils {
     return len;
   }
 
+  public static int writeVar(String s, ByteBuffer buffer) {
+    if (s == null) {
+      return write(-1, buffer);
+    }
+    int len = 0;
+    byte[] bytes = s.getBytes();
+    len += write(bytes.length, buffer);
+    buffer.put(bytes);
+    len += bytes.length;
+    return len;
+  }
+
   /**
    * write byteBuffer.capacity and byteBuffer.array to outputStream.
    */
@@ -454,6 +485,13 @@ public class ReadWriteIOUtils {
   }
 
   /**
+   * read a byte var from inputStream.
+   */
+  public static byte readByte(InputStream inputStream) throws IOException {
+    return (byte) inputStream.read();
+  }
+
+  /**
    * read a short var from inputStream.
    */
   public static short readShort(InputStream inputStream) throws IOException {
@@ -587,6 +625,23 @@ public class ReadWriteIOUtils {
   }
 
   /**
+   * read a string from the inputStream; the string length is encoded as a varint
+   */
+  public static String readVarIntString(InputStream inputStream) throws IOException {
+    int strLength = ReadWriteForEncodingUtils.readVarInt(inputStream);
+    if (strLength <= 0) {
+      return null;
+    }
+    byte[] bytes = new byte[strLength];
+    int readLen = inputStream.read(bytes, 0, strLength);
+    if (readLen != strLength) {
+      throw new IOException(String.format(RETURN_ERROR,
+          strLength, readLen));
+    }
+    return new String(bytes, 0, strLength);
+  }
+
+  /**
    * read string from byteBuffer.
    */
   public static String readString(ByteBuffer buffer) {
@@ -602,6 +657,21 @@ public class ReadWriteIOUtils {
   }
 
   /**
+   * string length is expected as a varint (note: this overload currently reads a plain 4-byte int, unlike the InputStream overload)
+   */
+  public static String readVarIntString(ByteBuffer buffer) {
+    int strLength = readInt(buffer);
+    if (strLength < 0) {
+      return null;
+    } else if (strLength == 0) {
+      return "";
+    }
+    byte[] bytes = new byte[strLength];
+    buffer.get(bytes, 0, strLength);
+    return new String(bytes, 0, strLength);
+  }
+
+  /**
    * read string from byteBuffer with user define length.
    */
   public static String readStringWithLength(ByteBuffer buffer, int length) {
@@ -842,33 +912,33 @@ public class ReadWriteIOUtils {
   }
 
   public static CompressionType readCompressionType(InputStream inputStream) throws IOException {
-    short n = readShort(inputStream);
-    return CompressionType.deserialize(n);
+    byte n = readByte(inputStream);
+    return CompressionType.byteToEnum(n);
   }
 
   public static CompressionType readCompressionType(ByteBuffer buffer) {
-    short n = readShort(buffer);
-    return CompressionType.deserialize(n);
+    byte n = buffer.get();
+    return CompressionType.byteToEnum(n);
   }
 
   public static TSDataType readDataType(InputStream inputStream) throws IOException {
-    short n = readShort(inputStream);
-    return TSDataType.deserialize(n);
+    byte n = readByte(inputStream);
+    return TSDataType.byteToEnum(n);
   }
 
   public static TSDataType readDataType(ByteBuffer buffer) {
-    short n = readShort(buffer);
-    return TSDataType.deserialize(n);
+    byte n = buffer.get();
+    return TSDataType.byteToEnum(n);
   }
 
   public static TSEncoding readEncoding(InputStream inputStream) throws IOException {
-    short n = readShort(inputStream);
-    return TSEncoding.deserialize(n);
+    byte n = readByte(inputStream);
+    return TSEncoding.byteToEnum(n);
   }
 
   public static TSEncoding readEncoding(ByteBuffer buffer) {
-    short n = readShort(buffer);
-    return TSEncoding.deserialize(n);
+    byte n = buffer.get();
+    return TSEncoding.byteToEnum(n);
   }
 
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/ChunkGroupMetaDataV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/ChunkGroupMetaDataV1.java
deleted file mode 100644
index 64ce030..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/ChunkGroupMetaDataV1.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.v1.file.metadata;
-
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Metadata of ChunkGroup.
- */
-public class ChunkGroupMetaDataV1 {
-
-  /**
-   * Name of device, this field is not serialized.
-   */
-  private String deviceID;
-
-  /**
-   * Byte offset of the corresponding data in the file Notice: include the chunk group marker.
-   * For Hadoop and Spark.
-   */
-  private long startOffsetOfChunkGroup;
-
-  /**
-   * End Byte position of the whole chunk group in the file Notice: position after the chunk group footer.
-   * For Hadoop and Spark.
-   */
-  private long endOffsetOfChunkGroup;
-
-  /**
-   * All time series chunks in this chunk group.
-   */
-  private List<ChunkMetadataV1> chunkMetaDataList;
-
-  private long version;
-
-  private ChunkGroupMetaDataV1() {
-    chunkMetaDataList = new ArrayList<>();
-  }
-
-  /**
-   * deserialize from ByteBuffer.
-   *
-   * @param buffer ByteBuffer
-   * @return ChunkGroupMetaData object
-   */
-  public static ChunkGroupMetaDataV1 deserializeFrom(ByteBuffer buffer) {
-    ChunkGroupMetaDataV1 chunkGroupMetaData = new ChunkGroupMetaDataV1();
-
-    chunkGroupMetaData.deviceID = ReadWriteIOUtils.readString(buffer);
-    chunkGroupMetaData.startOffsetOfChunkGroup = ReadWriteIOUtils.readLong(buffer);
-    chunkGroupMetaData.endOffsetOfChunkGroup = ReadWriteIOUtils.readLong(buffer);
-    chunkGroupMetaData.version = ReadWriteIOUtils.readLong(buffer);
-
-    int size = ReadWriteIOUtils.readInt(buffer);
-
-    List<ChunkMetadataV1> chunkMetaDataList = new ArrayList<>();
-    for (int i = 0; i < size; i++) {
-      ChunkMetadataV1 metaData = ChunkMetadataV1.deserializeFrom(buffer);
-      chunkMetaDataList.add(metaData);
-    }
-    chunkGroupMetaData.chunkMetaDataList = chunkMetaDataList;
-
-    return chunkGroupMetaData;
-  }
-
-  /**
-   * add time series chunk metadata to list. THREAD NOT SAFE
-   *
-   * @param metadata time series metadata to add
-   */
-  public void addTimeSeriesChunkMetaData(ChunkMetadataV1 metadata) {
-    if (chunkMetaDataList == null) {
-      chunkMetaDataList = new ArrayList<>();
-    }
-    chunkMetaDataList.add(metadata);
-  }
-
-  public List<ChunkMetadataV1> getChunkMetaDataList() {
-    return chunkMetaDataList;
-  }
-
-  public String getDeviceID() {
-    return deviceID;
-  }
-
-  public long getStartOffsetOfChunkGroup() {
-    return startOffsetOfChunkGroup;
-  }
-
-  public long getEndOffsetOfChunkGroup() {
-    return endOffsetOfChunkGroup;
-  }
-
-  public long getVersion() {
-    return version;
-  }
-
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/ChunkMetadataV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/ChunkMetadataV1.java
deleted file mode 100644
index fee07ab..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/ChunkMetadataV1.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.v1.file.metadata;
-
-import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-import org.apache.iotdb.tsfile.v1.file.metadata.statistics.StatisticsV1;
-
-import java.nio.ByteBuffer;
-/**
- * MetaData of one chunk.
- */
-public class ChunkMetadataV1 {
-
-
-  private String measurementUid;
-
-  /**
-   * Byte offset of the corresponding data in the file Notice: include the chunk header and marker.
-   */
-  private long offsetOfChunkHeader;
-
-  private long numOfPoints;
-
-  private long startTime;
-
-  private long endTime;
-
-  private TSDataType tsDataType;
-
-  /**
-   * version is used to define the order of operations(insertion, deletion, update). version is set
-   * according to its belonging ChunkGroup only when being queried, so it is not persisted.
-   */
-  private long version;
-
-  private TsDigestV1 valuesStatistics;
-
-  private ChunkMetadataV1() {
-  }
-
-  /**
-   * deserialize from ByteBuffer.
-   *
-   * @param buffer ByteBuffer
-   * @return ChunkMetaData object
-   */
-  public static ChunkMetadataV1 deserializeFrom(ByteBuffer buffer) {
-    ChunkMetadataV1 chunkMetaData = new ChunkMetadataV1();
-
-    chunkMetaData.measurementUid = ReadWriteIOUtils.readString(buffer);
-    chunkMetaData.offsetOfChunkHeader = ReadWriteIOUtils.readLong(buffer);
-    chunkMetaData.numOfPoints = ReadWriteIOUtils.readLong(buffer);
-    chunkMetaData.startTime = ReadWriteIOUtils.readLong(buffer);
-    chunkMetaData.endTime = ReadWriteIOUtils.readLong(buffer);
-    chunkMetaData.tsDataType = ReadWriteIOUtils.readDataType(buffer);
-
-    chunkMetaData.valuesStatistics = TsDigestV1.deserializeFrom(buffer);
-
-    return chunkMetaData;
-  }
-
-  public long getNumOfPoints() {
-    return numOfPoints;
-  }
-  
-  public ChunkMetadata upgradeToChunkMetadata() {
-    Statistics<?> statistics = StatisticsV1
-        .constructStatisticsFromOldChunkMetadata(this);
-    ChunkMetadata chunkMetadata = new ChunkMetadata(this.measurementUid, this.tsDataType,
-        this.offsetOfChunkHeader, statistics);
-    chunkMetadata.setFromOldTsFile(true);
-    return chunkMetadata;
-  }
-
-  /**
-   * get offset of chunk header.
-   *
-   * @return Byte offset of header of this chunk (includes the marker)
-   */
-  public long getOffsetOfChunkHeader() {
-    return offsetOfChunkHeader;
-  }
-
-  public String getMeasurementUid() {
-    return measurementUid;
-  }
-
-  public TsDigestV1 getDigest() {
-    return valuesStatistics;
-  }
-
-  public long getStartTime() {
-    return startTime;
-  }
-
-  public long getEndTime() {
-    return endTime;
-  }
-
-  public TSDataType getTsDataType() {
-    return tsDataType;
-  }
-
-  public long getVersion() {
-    return version;
-  }
-
-  public void setVersion(long version) {
-    this.version = version;
-  }
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TimeseriesMetadataForV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TimeseriesMetadataForV1.java
deleted file mode 100644
index 04fd8e5..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TimeseriesMetadataForV1.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.v1.file.metadata;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
-import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
-
-public class TimeseriesMetadataForV1 extends TimeseriesMetadata {
-  
-  private List<ChunkMetadata> chunkMetadataList;
-
-  public void setChunkMetadataList(List<ChunkMetadata> chunkMetadataList) {
-    this.chunkMetadataList = chunkMetadataList;
-  }
-
-  @Override
-  public List<ChunkMetadata> loadChunkMetadataList() throws IOException {
-    chunkMetadataLoader.setDiskChunkLoader(chunkMetadataList);
-    return chunkMetadataList;
-  }
-
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDeviceMetadataIndexV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDeviceMetadataIndexV1.java
deleted file mode 100644
index 2258b61..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDeviceMetadataIndexV1.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.v1.file.metadata;
-
-import java.nio.ByteBuffer;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-public class TsDeviceMetadataIndexV1 {
-
-  /**
-   * The offset of the TsDeviceMetadata.
-   */
-  private long offset;
-  /**
-   * The size of the TsDeviceMetadata in the disk.
-   */
-  private int len;
-  /**
-   * The start time of the device.
-   */
-  private long startTime;
-  /**
-   * The end time of the device.
-   */
-  private long endTime;
-
-  public TsDeviceMetadataIndexV1() {
-    //do nothing
-  }
-
-  /**
-   * use buffer to get a TsDeviceMetadataIndex.
-   *
-   * @param buffer -determine the index's source
-   * @return -a TsDeviceMetadataIndex
-   */
-  public static TsDeviceMetadataIndexV1 deserializeFrom(ByteBuffer buffer) {
-    TsDeviceMetadataIndexV1 index = new TsDeviceMetadataIndexV1();
-    index.offset = ReadWriteIOUtils.readLong(buffer);
-    index.len = ReadWriteIOUtils.readInt(buffer);
-    index.startTime = ReadWriteIOUtils.readLong(buffer);
-    index.endTime = ReadWriteIOUtils.readLong(buffer);
-    return index;
-  }
-
-  public long getOffset() {
-    return offset;
-  }
-
-  public int getLen() {
-    return len;
-  }
-
-  public long getStartTime() {
-    return startTime;
-  }
-
-  public long getEndTime() {
-    return endTime;
-  }
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDeviceMetadataV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDeviceMetadataV1.java
deleted file mode 100644
index 4441158..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDeviceMetadataV1.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.v1.file.metadata;
-
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-public class TsDeviceMetadataV1 {
-
-  /**
-   * start time for a device.
-   **/
-  private long startTime = Long.MAX_VALUE;
-
-  /**
-   * end time for a device.
-   **/
-  private long endTime = Long.MIN_VALUE;
-
-  /**
-   * Row groups in this file.
-   */
-  private List<ChunkGroupMetaDataV1> chunkGroupMetadataList = new ArrayList<>();
-
-  public TsDeviceMetadataV1() {
-    // allowed to clair an empty TsDeviceMetadata whose fields will be assigned later.
-  }
-
-
-  /**
-   * deserialize from the given buffer.
-   *
-   * @param buffer -buffer to deserialize
-   * @return -device meta data
-   */
-  public static TsDeviceMetadataV1 deserializeFrom(ByteBuffer buffer) {
-    TsDeviceMetadataV1 deviceMetadata = new TsDeviceMetadataV1();
-
-    deviceMetadata.startTime = ReadWriteIOUtils.readLong(buffer);
-    deviceMetadata.endTime = ReadWriteIOUtils.readLong(buffer);
-
-    int size = ReadWriteIOUtils.readInt(buffer);
-    if (size > 0) {
-      List<ChunkGroupMetaDataV1> chunkGroupMetaDataList = new ArrayList<>();
-      for (int i = 0; i < size; i++) {
-        chunkGroupMetaDataList.add(ChunkGroupMetaDataV1.deserializeFrom(buffer));
-      }
-      deviceMetadata.chunkGroupMetadataList = chunkGroupMetaDataList;
-    }
-
-    return deviceMetadata;
-  }
-
-  public List<ChunkGroupMetaDataV1> getChunkGroupMetaDataList() {
-    return Collections.unmodifiableList(chunkGroupMetadataList);
-  }
-
-  public long getStartTime() {
-    return startTime;
-  }
-
-  public long getEndTime() {
-    return endTime;
-  }
-
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDigestV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDigestV1.java
deleted file mode 100644
index a40d3be..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsDigestV1.java
+++ /dev/null
@@ -1,75 +0,0 @@
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.v1.file.metadata;
-
-import java.nio.ByteBuffer;
-
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-/**
- * Digest/statistics per chunk group and per page.
- */
-public class TsDigestV1 {
-
-  private ByteBuffer[] statistics;
-
-  public TsDigestV1() {
-    // allowed to declare an empty TsDigest whose fields will be assigned later.
-  }
-
-  /**
-   * use given buffer to deserialize.
-   *
-   * @param buffer -given buffer
-   * @return -an instance of TsDigest
-   */
-  public static TsDigestV1 deserializeFrom(ByteBuffer buffer) {
-    TsDigestV1 digest = new TsDigestV1();
-    int size = ReadWriteIOUtils.readInt(buffer);
-    if (size > 0) {
-      digest.statistics = new ByteBuffer[StatisticType.getTotalTypeNum()];
-      ByteBuffer value;
-      for (int i = 0; i < size; i++) {
-        short n = ReadWriteIOUtils.readShort(buffer);
-        value = ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(buffer);
-        digest.statistics[n] = value;
-      }
-    } // else left digest.statistics as null
-
-    return digest;
-  }
-
-  /**
-   * get statistics of the current object.
-   */
-  public ByteBuffer[] getStatistics() {
-    return statistics;
-  }
-
-  public enum StatisticType {
-    MIN_VALUE, MAX_VALUE, FIRST_VALUE, LAST_VALUE, SUM_VALUE;
-
-    public static int getTotalTypeNum() {
-      return StatisticType.values().length;
-    }
-
-  }
-}
\ No newline at end of file
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsFileMetadataV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsFileMetadataV1.java
deleted file mode 100644
index 9184154..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/TsFileMetadataV1.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.v1.file.metadata;
-
-import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.iotdb.tsfile.utils.BloomFilter;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
-
-/**
- * TSFileMetaData collects all metadata info and saves in its data structure.
- */
-public class TsFileMetadataV1 {
-
-  private Map<String, TsDeviceMetadataIndexV1> deviceIndexMap = new HashMap<>();
-
-  // bloom filter
-  private BloomFilter bloomFilter;
-
-  public TsFileMetadataV1() {
-    //do nothing
-  }
-
-  /**
-   * deserialize data from the buffer.
-   *
-   * @param buffer -buffer use to deserialize
-   * @return -a instance of TsFileMetaData
-   */
-  public static TsFileMetadataV1 deserializeFrom(ByteBuffer buffer) {
-    TsFileMetadataV1 fileMetaData = new TsFileMetadataV1();
-
-    int size = ReadWriteIOUtils.readInt(buffer);
-    if (size > 0) {
-      Map<String, TsDeviceMetadataIndexV1> deviceMap = new HashMap<>();
-      String key;
-      TsDeviceMetadataIndexV1 value;
-      for (int i = 0; i < size; i++) {
-        key = ReadWriteIOUtils.readString(buffer);
-        value = TsDeviceMetadataIndexV1.deserializeFrom(buffer);
-        deviceMap.put(key, value);
-      }
-      fileMetaData.deviceIndexMap = deviceMap;
-    }
-
-    size = ReadWriteIOUtils.readInt(buffer);
-    if (size > 0) {
-      for (int i = 0; i < size; i++) {
-        ReadWriteIOUtils.readString(buffer);
-        MeasurementSchema.deserializeFrom(buffer);
-      }
-    }
-
-    if (ReadWriteIOUtils.readIsNull(buffer)) {
-       ReadWriteIOUtils.readString(buffer); // createdBy String
-    }
-    ReadWriteIOUtils.readInt(buffer); // totalChunkNum
-    ReadWriteIOUtils.readInt(buffer); // invalidChunkNum
-    // read bloom filter
-    if (buffer.hasRemaining()) {
-      byte[] bytes = ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(buffer).array();
-      int filterSize = ReadWriteIOUtils.readInt(buffer);
-      int hashFunctionSize = ReadWriteIOUtils.readInt(buffer);
-      fileMetaData.bloomFilter = BloomFilter.buildBloomFilter(bytes, filterSize, hashFunctionSize);
-    }
-
-    return fileMetaData;
-  }
-
-  public BloomFilter getBloomFilter() {
-    return bloomFilter;
-  }
-
-  public Map<String, TsDeviceMetadataIndexV1> getDeviceMap() {
-    return deviceIndexMap;
-  }
-
-  public boolean containsDevice(String deltaObjUid) {
-    return this.deviceIndexMap.containsKey(deltaObjUid);
-  }
-
-  public TsDeviceMetadataIndexV1 getDeviceMetadataIndex(String deviceUid) {
-    return this.deviceIndexMap.get(deviceUid);
-  }
-
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/BinaryStatisticsV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/BinaryStatisticsV1.java
deleted file mode 100644
index 3c68be1..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/BinaryStatisticsV1.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.v1.file.metadata.statistics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import org.apache.iotdb.tsfile.utils.Binary;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-/**
- * Statistics for string type.
- */
-public class BinaryStatisticsV1 extends StatisticsV1<Binary> {
-
-  private Binary min = new Binary("");
-  private Binary max = new Binary("");
-  private Binary first = new Binary("");
-  private Binary last = new Binary("");
-  private double sum;
-
-  @Override
-  public Binary getMin() {
-    return min;
-  }
-
-  @Override
-  public Binary getMax() {
-    return max;
-  }
-
-  @Override
-  public Binary getFirst() {
-    return first;
-  }
-
-  @Override
-  public Binary getLast() {
-    return last;
-  }
-
-  @Override
-  public double getSum() {
-    return sum;
-  }
-
-  @Override
-  void deserialize(ByteBuffer byteBuffer) throws IOException {
-    this.min = new Binary(
-        ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(byteBuffer).array());
-    this.max = new Binary(
-        ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(byteBuffer).array());
-    this.first = new Binary(
-        ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(byteBuffer).array());
-    this.last = new Binary(
-        ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(byteBuffer).array());
-    this.sum = ReadWriteIOUtils.readDouble(byteBuffer);
-  }
-
-  @Override
-  void deserialize(InputStream inputStream) throws IOException {
-    this.min = new Binary(ReadWriteIOUtils.readBytesWithSelfDescriptionLength(inputStream));
-    this.max = new Binary(ReadWriteIOUtils.readBytesWithSelfDescriptionLength(inputStream));
-    this.first = new Binary(ReadWriteIOUtils.readBytesWithSelfDescriptionLength(inputStream));
-    this.last = new Binary(ReadWriteIOUtils.readBytesWithSelfDescriptionLength(inputStream));
-    this.sum = ReadWriteIOUtils.readDouble(inputStream);
-  }
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/BooleanStatisticsV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/BooleanStatisticsV1.java
deleted file mode 100644
index 24e2152..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/BooleanStatisticsV1.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.v1.file.metadata.statistics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-/**
- * Boolean Statistics.
- */
-public class BooleanStatisticsV1 extends StatisticsV1<Boolean> {
-
-  private boolean min;
-  private boolean max;
-  private boolean first;
-  private boolean last;
-  private double sum;
-
-  @Override
-  public Boolean getMin() {
-    return min;
-  }
-
-  @Override
-  public Boolean getMax() {
-    return max;
-  }
-
-  @Override
-  public Boolean getFirst() {
-    return first;
-  }
-
-  @Override
-  public Boolean getLast() {
-    return last;
-  }
-
-  @Override
-  public double getSum() {
-    return sum;
-  }
-
-  @Override
-  void deserialize(ByteBuffer byteBuffer) throws IOException {
-    this.min = ReadWriteIOUtils.readBool(byteBuffer);
-    this.max = ReadWriteIOUtils.readBool(byteBuffer);
-    this.first = ReadWriteIOUtils.readBool(byteBuffer);
-    this.last = ReadWriteIOUtils.readBool(byteBuffer);
-    this.sum = ReadWriteIOUtils.readDouble(byteBuffer);
-  }
-
-  @Override
-  void deserialize(InputStream inputStream) throws IOException {
-    this.min = ReadWriteIOUtils.readBool(inputStream);
-    this.max = ReadWriteIOUtils.readBool(inputStream);
-    this.first = ReadWriteIOUtils.readBool(inputStream);
-    this.last = ReadWriteIOUtils.readBool(inputStream);
-    this.sum = ReadWriteIOUtils.readDouble(inputStream);
-  }
-
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/DoubleStatisticsV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/DoubleStatisticsV1.java
deleted file mode 100644
index e784b91..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/DoubleStatisticsV1.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.v1.file.metadata.statistics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-/**
- * Statistics for double type.
- */
-public class DoubleStatisticsV1 extends StatisticsV1<Double> {
-
-  private double min;
-  private double max;
-  private double first;
-  private double last;
-  private double sum;
-
-  @Override
-  public Double getMin() {
-    return min;
-  }
-
-  @Override
-  public Double getMax() {
-    return max;
-  }
-
-  @Override
-  public Double getFirst() {
-    return first;
-  }
-
-  @Override
-  public Double getLast() {
-    return last;
-  }
-
-  @Override
-  public double getSum() {
-    return sum;
-  }
-
-  @Override
-  void deserialize(ByteBuffer byteBuffer) throws IOException {
-    this.min = ReadWriteIOUtils.readDouble(byteBuffer);
-    this.max = ReadWriteIOUtils.readDouble(byteBuffer);
-    this.first = ReadWriteIOUtils.readDouble(byteBuffer);
-    this.last = ReadWriteIOUtils.readDouble(byteBuffer);
-    this.sum = ReadWriteIOUtils.readDouble(byteBuffer);
-  }
-
-  @Override
-  void deserialize(InputStream inputStream) throws IOException {
-    this.min = ReadWriteIOUtils.readDouble(inputStream);
-    this.max = ReadWriteIOUtils.readDouble(inputStream);
-    this.first = ReadWriteIOUtils.readDouble(inputStream);
-    this.last = ReadWriteIOUtils.readDouble(inputStream);
-    this.sum = ReadWriteIOUtils.readDouble(inputStream);
-  }
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/FloatStatisticsV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/FloatStatisticsV1.java
deleted file mode 100644
index 6a8d8e8..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/FloatStatisticsV1.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.v1.file.metadata.statistics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-/**
- * Statistics for float type.
- */
-public class FloatStatisticsV1 extends StatisticsV1<Float> {
-
-  private float min;
-  private float max;
-  private float first;
-  private double sum;
-  private float last;
-
-  @Override
-  public Float getMin() {
-    return min;
-  }
-
-  @Override
-  public Float getMax() {
-    return max;
-  }
-
-  @Override
-  public Float getFirst() {
-    return first;
-  }
-
-  @Override
-  public Float getLast() {
-    return last;
-  }
-
-  @Override
-  public double getSum() {
-    return sum;
-  }
-
-  @Override
-  void deserialize(ByteBuffer byteBuffer) throws IOException {
-    this.min = ReadWriteIOUtils.readFloat(byteBuffer);
-    this.max = ReadWriteIOUtils.readFloat(byteBuffer);
-    this.first = ReadWriteIOUtils.readFloat(byteBuffer);
-    this.last = ReadWriteIOUtils.readFloat(byteBuffer);
-    this.sum = ReadWriteIOUtils.readDouble(byteBuffer);
-  }
-
-  @Override
-  void deserialize(InputStream inputStream) throws IOException {
-    this.min = ReadWriteIOUtils.readFloat(inputStream);
-    this.max = ReadWriteIOUtils.readFloat(inputStream);
-    this.first = ReadWriteIOUtils.readFloat(inputStream);
-    this.last = ReadWriteIOUtils.readFloat(inputStream);
-    this.sum = ReadWriteIOUtils.readDouble(inputStream);
-  }
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/IntegerStatisticsV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/IntegerStatisticsV1.java
deleted file mode 100644
index 2511abd..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/IntegerStatisticsV1.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.v1.file.metadata.statistics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-/**
- * Statistics for int type.
- */
-public class IntegerStatisticsV1 extends StatisticsV1<Integer> {
-
-  private int min;
-  private int max;
-  private int first;
-  private int last;
-  private double sum;
-
-  @Override
-  public Integer getMin() {
-    return min;
-  }
-
-  @Override
-  public Integer getMax() {
-    return max;
-  }
-
-  @Override
-  public Integer getFirst() {
-    return first;
-  }
-
-  @Override
-  public Integer getLast() {
-    return last;
-  }
-
-  @Override
-  public double getSum() {
-    return sum;
-  }
-
-  @Override
-  void deserialize(ByteBuffer byteBuffer) throws IOException {
-    this.min = ReadWriteIOUtils.readInt(byteBuffer);
-    this.max = ReadWriteIOUtils.readInt(byteBuffer);
-    this.first = ReadWriteIOUtils.readInt(byteBuffer);
-    this.last = ReadWriteIOUtils.readInt(byteBuffer);
-    this.sum = ReadWriteIOUtils.readDouble(byteBuffer);
-  }
-
-  @Override
-  void deserialize(InputStream inputStream) throws IOException {
-    this.min = ReadWriteIOUtils.readInt(inputStream);
-    this.max = ReadWriteIOUtils.readInt(inputStream);
-    this.first = ReadWriteIOUtils.readInt(inputStream);
-    this.last = ReadWriteIOUtils.readInt(inputStream);
-    this.sum = ReadWriteIOUtils.readDouble(inputStream);
-  }
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/LongStatisticsV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/LongStatisticsV1.java
deleted file mode 100644
index 29beeeb..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/LongStatisticsV1.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.v1.file.metadata.statistics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
-/**
- * Statistics for long type.
- */
-public class LongStatisticsV1 extends StatisticsV1<Long> {
-
-  private long min;
-  private long max;
-  private long first;
-  private long last;
-  private double sum;
-
-  @Override
-  public Long getMin() {
-    return min;
-  }
-
-  @Override
-  public Long getMax() {
-    return max;
-  }
-
-  @Override
-  public Long getFirst() {
-    return first;
-  }
-
-  @Override
-  public Long getLast() {
-    return last;
-  }
-
-  @Override
-  public double getSum() {
-    return sum;
-  }
-
-  @Override
-  void deserialize(ByteBuffer byteBuffer) throws IOException {
-    this.min = ReadWriteIOUtils.readLong(byteBuffer);
-    this.max = ReadWriteIOUtils.readLong(byteBuffer);
-    this.first = ReadWriteIOUtils.readLong(byteBuffer);
-    this.last = ReadWriteIOUtils.readLong(byteBuffer);
-    this.sum = ReadWriteIOUtils.readDouble(byteBuffer);
-  }
-
-  @Override
-  void deserialize(InputStream inputStream) throws IOException {
-    this.min = ReadWriteIOUtils.readLong(inputStream);
-    this.max = ReadWriteIOUtils.readLong(inputStream);
-    this.first = ReadWriteIOUtils.readLong(inputStream);
-    this.last = ReadWriteIOUtils.readLong(inputStream);
-    this.sum = ReadWriteIOUtils.readDouble(inputStream);
-  }
-
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/StatisticsV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/StatisticsV1.java
deleted file mode 100644
index ea91dd3..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/metadata/statistics/StatisticsV1.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.v1.file.metadata.statistics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-
-import org.apache.iotdb.tsfile.exception.write.UnknownColumnTypeException;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.file.metadata.statistics.BinaryStatistics;
-import org.apache.iotdb.tsfile.file.metadata.statistics.BooleanStatistics;
-import org.apache.iotdb.tsfile.file.metadata.statistics.DoubleStatistics;
-import org.apache.iotdb.tsfile.file.metadata.statistics.FloatStatistics;
-import org.apache.iotdb.tsfile.file.metadata.statistics.IntegerStatistics;
-import org.apache.iotdb.tsfile.file.metadata.statistics.LongStatistics;
-import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
-import org.apache.iotdb.tsfile.utils.Binary;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-import org.apache.iotdb.tsfile.v1.file.metadata.ChunkMetadataV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.TsDigestV1;
-
-/**
- * This class is used for recording statistic information of each measurement in a delta file. While
- * writing processing, the processor records the digest information. Statistics includes maximum,
- * minimum and null value count up to version 0.0.1.<br> Each data type extends this Statistic as
- * super class.<br>
- *
- * @param <T> data type for Statistics
- */
-public abstract class StatisticsV1<T> {
-
-  /**
-   * static method providing statistic instance for respective data type.
-   *
-   * @param type - data type
-   * @return Statistics
-   */
-  public static StatisticsV1 getStatsByType(TSDataType type) {
-    switch (type) {
-      case INT32:
-        return new IntegerStatisticsV1();
-      case INT64:
-        return new LongStatisticsV1();
-      case TEXT:
-        return new BinaryStatisticsV1();
-      case BOOLEAN:
-        return new BooleanStatisticsV1();
-      case DOUBLE:
-        return new DoubleStatisticsV1();
-      case FLOAT:
-        return new FloatStatisticsV1();
-      default:
-        throw new UnknownColumnTypeException(type.toString());
-    }
-  }
-
-  public static StatisticsV1 deserialize(InputStream inputStream, TSDataType dataType)
-      throws IOException {
-    StatisticsV1<?> statistics = getStatsByType(dataType);
-    statistics.deserialize(inputStream);
-    return statistics;
-  }
-
-  public static StatisticsV1 deserialize(ByteBuffer buffer, TSDataType dataType) throws IOException {
-    StatisticsV1<?> statistics = getStatsByType(dataType);
-    statistics.deserialize(buffer);
-    return statistics;
-  }
-
-  /**
-   * For upgrading 0.9.x/v1 -> 0.10/v2
-   */
-  public static Statistics upgradeOldStatistics(StatisticsV1<?> oldstatistics,
-      TSDataType dataType, int numOfValues, long maxTimestamp, long minTimestamp) {
-    Statistics<?> statistics = Statistics.getStatsByType(dataType);
-    statistics.setStartTime(minTimestamp);
-    statistics.setEndTime(maxTimestamp);
-    statistics.setCount(numOfValues);
-    statistics.setEmpty(false);
-    switch (dataType) {
-      case INT32:
-        ((IntegerStatistics) statistics)
-        .initializeStats(((IntegerStatisticsV1) oldstatistics).getMin(),
-            ((IntegerStatisticsV1) oldstatistics).getMax(),
-            ((IntegerStatisticsV1) oldstatistics).getFirst(),
-            ((IntegerStatisticsV1) oldstatistics).getLast(),
-            ((IntegerStatisticsV1) oldstatistics).getSum());
-        break;
-      case INT64:
-        ((LongStatistics) statistics)
-        .initializeStats(((LongStatisticsV1) oldstatistics).getMin(),
-            ((LongStatisticsV1) oldstatistics).getMax(),
-            ((LongStatisticsV1) oldstatistics).getFirst(),
-            ((LongStatisticsV1) oldstatistics).getLast(),
-            ((LongStatisticsV1) oldstatistics).getSum());
-        break;
-      case TEXT:
-        ((BinaryStatistics) statistics)
-        .initializeStats(((BinaryStatisticsV1) oldstatistics).getFirst(),
-            ((BinaryStatisticsV1) oldstatistics).getLast());
-        break;
-      case BOOLEAN:
-        ((BooleanStatistics) statistics)
-        .initializeStats(((BooleanStatisticsV1) oldstatistics).getFirst(),
-            ((BooleanStatisticsV1) oldstatistics).getLast());
-        break;
-      case DOUBLE:
-        ((DoubleStatistics) statistics)
-        .initializeStats(((DoubleStatisticsV1) oldstatistics).getMin(),
-            ((DoubleStatisticsV1) oldstatistics).getMax(),
-            ((DoubleStatisticsV1) oldstatistics).getFirst(),
-            ((DoubleStatisticsV1) oldstatistics).getLast(),
-            ((DoubleStatisticsV1) oldstatistics).getSum());
-        break;
-      case FLOAT:
-        ((FloatStatistics) statistics)
-        .initializeStats(((FloatStatisticsV1) oldstatistics).getMin(),
-            ((FloatStatisticsV1) oldstatistics).getMax(),
-            ((FloatStatisticsV1) oldstatistics).getFirst(),
-            ((FloatStatisticsV1) oldstatistics).getLast(),
-            ((FloatStatisticsV1) oldstatistics).getSum());
-        break;
-      default:
-        throw new UnknownColumnTypeException(statistics.getType()
-            .toString());
-    }
-    return statistics;
-  }
-
-  /**
-   * For upgrading 0.9.x/v1 -> 0.10.x/v2
-   */
-  public static Statistics constructStatisticsFromOldChunkMetadata(ChunkMetadataV1 oldChunkMetadata) {
-    Statistics<?> statistics = Statistics.getStatsByType(oldChunkMetadata.getTsDataType());
-    statistics.setStartTime(oldChunkMetadata.getStartTime());
-    statistics.setEndTime(oldChunkMetadata.getEndTime());
-    statistics.setCount(oldChunkMetadata.getNumOfPoints());
-    statistics.setEmpty(false);
-    TsDigestV1 tsDigest = oldChunkMetadata.getDigest();
-    ByteBuffer[] buffers = tsDigest.getStatistics();
-    switch (statistics.getType()) {
-      case INT32:
-        ((IntegerStatistics) statistics)
-        .initializeStats(ReadWriteIOUtils.readInt(buffers[0]),
-            ReadWriteIOUtils.readInt(buffers[1]),
-            ReadWriteIOUtils.readInt(buffers[2]),
-            ReadWriteIOUtils.readInt(buffers[3]),
-            ReadWriteIOUtils.readDouble(buffers[4]));
-        break;
-      case INT64:
-        ((LongStatistics) statistics)
-        .initializeStats(ReadWriteIOUtils.readLong(buffers[0]),
-            ReadWriteIOUtils.readLong(buffers[1]),
-            ReadWriteIOUtils.readLong(buffers[2]),
-            ReadWriteIOUtils.readLong(buffers[3]),
-            ReadWriteIOUtils.readDouble(buffers[4]));
-        break;
-      case TEXT:
-        ((BinaryStatistics) statistics)
-        .initializeStats(new Binary(buffers[2].array()),
-            new Binary(buffers[3].array()));
-        break;
-      case BOOLEAN:
-        ((BooleanStatistics) statistics)
-        .initializeStats(ReadWriteIOUtils.readBool(buffers[2]),
-            ReadWriteIOUtils.readBool(buffers[3]));
-        break;
-      case DOUBLE:
-        ((DoubleStatistics) statistics)
-        .initializeStats(ReadWriteIOUtils.readDouble(buffers[0]),
-            ReadWriteIOUtils.readDouble(buffers[1]),
-            ReadWriteIOUtils.readDouble(buffers[2]),
-            ReadWriteIOUtils.readDouble(buffers[3]),
-            ReadWriteIOUtils.readDouble(buffers[4]));
-        break;
-      case FLOAT:
-        ((FloatStatistics) statistics)
-        .initializeStats(ReadWriteIOUtils.readFloat(buffers[0]),
-            ReadWriteIOUtils.readFloat(buffers[1]),
-            ReadWriteIOUtils.readFloat(buffers[2]),
-            ReadWriteIOUtils.readFloat(buffers[3]),
-            ReadWriteIOUtils.readDouble(buffers[4]));
-        break;
-      default:
-        throw new UnknownColumnTypeException(statistics.getType()
-            .toString());
-    }
-    return statistics;
-  }
-
-  public abstract T getMin();
-
-  public abstract T getMax();
-
-  public abstract T getFirst();
-
-  public abstract T getLast();
-
-  public abstract double getSum();
-
-  /**
-   * read data from the inputStream.
-   */
-  abstract void deserialize(InputStream inputStream) throws IOException;
-
-  abstract void deserialize(ByteBuffer byteBuffer) throws IOException;
-
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/utils/HeaderUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/utils/HeaderUtils.java
deleted file mode 100644
index 403db9a..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/file/utils/HeaderUtils.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.v1.file.utils;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-
-import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
-import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.header.ChunkHeader;
-import org.apache.iotdb.tsfile.file.header.PageHeader;
-import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
-import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
-import org.apache.iotdb.tsfile.read.reader.TsFileInput;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-import org.apache.iotdb.tsfile.v1.file.metadata.statistics.StatisticsV1;
-
-public class HeaderUtils {
-  
-  private HeaderUtils() {
-  }
-  
-  public static PageHeader deserializePageHeaderV1(InputStream inputStream, TSDataType dataType)
-      throws IOException {
-    int uncompressedSize = ReadWriteIOUtils.readInt(inputStream);
-    int compressedSize = ReadWriteIOUtils.readInt(inputStream);
-    int numOfValues = ReadWriteIOUtils.readInt(inputStream);
-    long maxTimestamp = ReadWriteIOUtils.readLong(inputStream);
-    long minTimestamp = ReadWriteIOUtils.readLong(inputStream);
-    StatisticsV1<?> oldstatistics = StatisticsV1.deserialize(inputStream, dataType);
-    Statistics<?> statistics = StatisticsV1.upgradeOldStatistics(oldstatistics, dataType, 
-        numOfValues, maxTimestamp, minTimestamp);
-    return new PageHeader(uncompressedSize, compressedSize, statistics);
-  }
-
-  public static PageHeader deserializePageHeaderV1(ByteBuffer buffer, TSDataType dataType)
-      throws IOException {
-    int uncompressedSize = ReadWriteIOUtils.readInt(buffer);
-    int compressedSize = ReadWriteIOUtils.readInt(buffer);
-    int numOfValues = ReadWriteIOUtils.readInt(buffer);
-    long maxTimestamp = ReadWriteIOUtils.readLong(buffer);
-    long minTimestamp = ReadWriteIOUtils.readLong(buffer);
-    StatisticsV1<?> oldstatistics = StatisticsV1.deserialize(buffer, dataType);
-    Statistics<?> statistics = StatisticsV1.upgradeOldStatistics(oldstatistics, dataType, 
-        numOfValues, maxTimestamp, minTimestamp);
-    return new PageHeader(uncompressedSize, compressedSize, statistics);
-  }
-
-  /**
-   * deserialize from inputStream.
-   *
-   * @param markerRead Whether the marker of the CHUNK_HEADER has been read
-   */
-  public static ChunkHeader deserializeChunkHeaderV1(InputStream inputStream, boolean markerRead) 
-      throws IOException {
-    if (!markerRead) {
-      byte marker = (byte) inputStream.read();
-      if (marker != MetaMarker.CHUNK_HEADER) {
-        MetaMarker.handleUnexpectedMarker(marker);
-      }
-    }
-
-    String measurementID = ReadWriteIOUtils.readString(inputStream);
-    int dataSize = ReadWriteIOUtils.readInt(inputStream);
-    TSDataType dataType = TSDataType.deserialize(ReadWriteIOUtils.readShort(inputStream));
-    int numOfPages = ReadWriteIOUtils.readInt(inputStream);
-    CompressionType type = ReadWriteIOUtils.readCompressionType(inputStream);
-    TSEncoding encoding = ReadWriteIOUtils.readEncoding(inputStream);
-    // read maxTombstoneTime from old TsFile, has been removed in newer versions of TsFile
-    ReadWriteIOUtils.readLong(inputStream);
-    return new ChunkHeader(measurementID, dataSize, dataType, type, encoding,
-        numOfPages);
-  }
-
-  /**
-   * deserialize from TsFileInput.
-   *
-   * @param input           TsFileInput
-   * @param offset          offset
-   * @param chunkHeaderSize the size of chunk's header
-   * @param markerRead      read marker (boolean type)
-   * @return CHUNK_HEADER object
-   * @throws IOException IOException
-   */
-  public static ChunkHeader deserializeChunkHeaderV1(TsFileInput input, long offset,
-      int chunkHeaderSize, boolean markerRead) throws IOException {
-    long offsetVar = offset;
-    if (!markerRead) {
-      offsetVar++;
-    }
-
-    // read chunk header from input to buffer
-    ByteBuffer buffer = ByteBuffer.allocate(chunkHeaderSize);
-    input.read(buffer, offsetVar);
-    buffer.flip();
-
-    // read measurementID
-    int size = buffer.getInt();
-    String measurementID = ReadWriteIOUtils.readStringWithLength(buffer, size);
-    int dataSize = ReadWriteIOUtils.readInt(buffer);
-    TSDataType dataType = TSDataType.deserialize(ReadWriteIOUtils.readShort(buffer));
-    int numOfPages = ReadWriteIOUtils.readInt(buffer);
-    CompressionType type = ReadWriteIOUtils.readCompressionType(buffer);
-    TSEncoding encoding = ReadWriteIOUtils.readEncoding(buffer);
-    // read maxTombstoneTime from old TsFile, has been removed in newer versions of TsFile
-    ReadWriteIOUtils.readLong(buffer);
-    return new ChunkHeader(measurementID, dataSize, dataType, type, encoding, numOfPages);
-  }
-
-  public static int getSerializedSizeV1(String measurementID) {
-    return Byte.BYTES // marker
-        + Integer.BYTES // measurementID length
-        + measurementID.getBytes(TSFileConfig.STRING_CHARSET).length // measurementID
-        + Integer.BYTES // dataSize
-        + TSDataType.getSerializedSize() // dataType
-        + CompressionType.getSerializedSize() // compressionType
-        + TSEncoding.getSerializedSize() // encodingType
-        + Integer.BYTES // numOfPages
-        + Long.BYTES;  // maxTombstoneTime
-  }
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/read/TsFileSequenceReaderForV1.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/read/TsFileSequenceReaderForV1.java
deleted file mode 100644
index 91990be..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/v1/read/TsFileSequenceReaderForV1.java
+++ /dev/null
@@ -1,409 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.v1.read;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
-import org.apache.iotdb.tsfile.file.header.ChunkHeader;
-import org.apache.iotdb.tsfile.file.header.PageHeader;
-import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
-import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
-import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
-import org.apache.iotdb.tsfile.read.common.Chunk;
-import org.apache.iotdb.tsfile.read.common.Path;
-import org.apache.iotdb.tsfile.read.reader.TsFileInput;
-import org.apache.iotdb.tsfile.utils.BloomFilter;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-import org.apache.iotdb.tsfile.v1.file.metadata.ChunkGroupMetaDataV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.ChunkMetadataV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.TsDeviceMetadataV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.TsDeviceMetadataIndexV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.TsFileMetadataV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.TimeseriesMetadataForV1;
-import org.apache.iotdb.tsfile.v1.file.utils.HeaderUtils;
-
-public class TsFileSequenceReaderForV1 extends TsFileSequenceReader {
-
-  private long fileMetadataPos;
-  private int fileMetadataSize;
-  private TsFileMetadataV1 oldTsFileMetaData;
-  // device -> measurement -> TimeseriesMetadata
-  private Map<String, Map<String, TimeseriesMetadata>> cachedDeviceMetadataFromOldFile = new ConcurrentHashMap<>();
-  private static final ReadWriteLock cacheLock = new ReentrantReadWriteLock();
-  private boolean cacheDeviceMetadata;
-
-  /**
-   * Create a file reader of the given file. The reader will read the tail of the file to get the
-   * file metadata size.Then the reader will skip the first TSFileConfig.MAGIC_STRING.getBytes().length
-   * + TSFileConfig.NUMBER_VERSION.getBytes().length bytes of the file for preparing reading real
-   * data.
-   *
-   * @param file the data file
-   * @throws IOException If some I/O error occurs
-   */
-  public TsFileSequenceReaderForV1(String file) throws IOException {
-    super(file, true);
-  }
-
-  /**
-   * construct function for TsFileSequenceReader.
-   *
-   * @param file -given file name
-   * @param loadMetadataSize -whether load meta data size
-   */
-  public TsFileSequenceReaderForV1(String file, boolean loadMetadataSize) throws IOException {
-    super(file, loadMetadataSize);
-  }
-
-  /**
-   * Create a file reader of the given file. The reader will read the tail of the file to get the
-   * file metadata size.Then the reader will skip the first TSFileConfig.MAGIC_STRING.getBytes().length
-   * + TSFileConfig.NUMBER_VERSION.getBytes().length bytes of the file for preparing reading real
-   * data.
-   *
-   * @param input given input
-   */
-  public TsFileSequenceReaderForV1(TsFileInput input) throws IOException {
-    this(input, true);
-  }
-
-  /**
-   * construct function for TsFileSequenceReader.
-   *
-   * @param input -given input
-   * @param loadMetadataSize -load meta data size
-   */
-  public TsFileSequenceReaderForV1(TsFileInput input, boolean loadMetadataSize) throws IOException {
-    super(input, loadMetadataSize);
-  }
-
-  /**
-   * construct function for TsFileSequenceReader.
-   *
-   * @param input the input of a tsfile. The current position should be a markder and then a chunk
-   * Header, rather than the magic number
-   * @param fileMetadataPos the position of the file metadata in the TsFileInput from the beginning
-   * of the input to the current position
-   * @param fileMetadataSize the byte size of the file metadata in the input
-   */
-  public TsFileSequenceReaderForV1(TsFileInput input, long fileMetadataPos, int fileMetadataSize) {
-    super(input, fileMetadataPos, fileMetadataSize);
-    this.fileMetadataPos = fileMetadataPos;
-    this.fileMetadataSize = fileMetadataSize;
-  }
-
-  @Override
-  public void loadMetadataSize() throws IOException {
-    ByteBuffer metadataSize = ByteBuffer.allocate(Integer.BYTES);
-    if (readTailMagic().equals(TSFileConfig.MAGIC_STRING)) {
-      tsFileInput.read(metadataSize,
-          tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length - Integer.BYTES);
-      metadataSize.flip();
-      // read file metadata size and position
-      fileMetadataSize = ReadWriteIOUtils.readInt(metadataSize);
-      fileMetadataPos = tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length
-          - Integer.BYTES - fileMetadataSize;
-    }
-  }
-  
-  public TsFileMetadataV1 readOldFileMetadata() throws IOException {
-    if (oldTsFileMetaData == null) {
-      oldTsFileMetaData = TsFileMetadataV1
-          .deserializeFrom(readDataFromOldFile(fileMetadataPos, fileMetadataSize));
-    }
-    return oldTsFileMetaData;
-  }
-
-  /**
-   * this function does not modify the position of the file reader.
-   *
-   * @throws IOException io error
-   */
-  @Override
-  public BloomFilter readBloomFilter() throws IOException {
-    readOldFileMetadata();
-    return oldTsFileMetaData.getBloomFilter();
-  }
-
-  /**
-   * this function reads measurements and TimeseriesMetaDatas in given device Thread Safe
-   *
-   * @param device name
-   * @return the map measurementId -> TimeseriesMetaData in one device
-   * @throws IOException io error
-   */
-  @Override
-  public Map<String, TimeseriesMetadata> readDeviceMetadata(String device) throws IOException {
-    if (!cacheDeviceMetadata) {
-      return constructDeviceMetadataFromOldFile(device);
-    }
-
-    cacheLock.readLock().lock();
-    try {
-      if (cachedDeviceMetadataFromOldFile.containsKey(device)) {
-        return cachedDeviceMetadataFromOldFile.get(device);
-      }
-    } finally {
-      cacheLock.readLock().unlock();
-    }
-
-    cacheLock.writeLock().lock();
-    try {
-      if (cachedDeviceMetadataFromOldFile.containsKey(device)) {
-        return cachedDeviceMetadataFromOldFile.get(device);
-      }
-      readOldFileMetadata();
-      if (!oldTsFileMetaData.containsDevice(device)) {
-        return new HashMap<>();
-      }
-      Map<String, TimeseriesMetadata> deviceMetadata = constructDeviceMetadataFromOldFile(device);
-      cachedDeviceMetadataFromOldFile.put(device, deviceMetadata);
-      return deviceMetadata;
-    } finally {
-      cacheLock.writeLock().unlock();
-    }
-  }
-
-  /**
-   * for old TsFile
-   * this function does not modify the position of the file reader.
-   */
-  private Map<String, TimeseriesMetadata> constructDeviceMetadataFromOldFile(String device)
-      throws IOException {
-    Map<String, TimeseriesMetadata> newDeviceMetadata = new HashMap<>();
-    readOldFileMetadata();
-    TsDeviceMetadataIndexV1 index = oldTsFileMetaData.getDeviceMetadataIndex(device);
-    // read TsDeviceMetadata from file
-    TsDeviceMetadataV1 tsDeviceMetadata = readOldTsDeviceMetaData(index);
-    if (tsDeviceMetadata == null) {
-      return newDeviceMetadata;
-    }
-
-    Map<String, List<ChunkMetadata>> measurementChunkMetaMap = new HashMap<>();
-    // get all ChunkMetaData of this path included in all ChunkGroups of this device
-    for (ChunkGroupMetaDataV1 chunkGroupMetaData : tsDeviceMetadata.getChunkGroupMetaDataList()) {
-      List<ChunkMetadataV1> chunkMetaDataListInOneChunkGroup = chunkGroupMetaData.getChunkMetaDataList();
-      for (ChunkMetadataV1 oldChunkMetadata : chunkMetaDataListInOneChunkGroup) {
-        oldChunkMetadata.setVersion(chunkGroupMetaData.getVersion());
-        measurementChunkMetaMap.computeIfAbsent(oldChunkMetadata.getMeasurementUid(), key -> new ArrayList<>())
-          .add(oldChunkMetadata.upgradeToChunkMetadata());
-      }
-    }
-    measurementChunkMetaMap.forEach((measurementId, chunkMetadataList) -> {
-      if (!chunkMetadataList.isEmpty()) {
-        TimeseriesMetadataForV1 timeseiresMetadata = new TimeseriesMetadataForV1();
-        timeseiresMetadata.setMeasurementId(measurementId);
-        timeseiresMetadata.setTSDataType(chunkMetadataList.get(0).getDataType());
-        Statistics<?> statistics = Statistics.getStatsByType(chunkMetadataList.get(0).getDataType());
-        for (ChunkMetadata chunkMetadata : chunkMetadataList) {
-          statistics.mergeStatistics(chunkMetadata.getStatistics());
-        }
-        timeseiresMetadata.setStatistics(statistics);
-        timeseiresMetadata.setChunkMetadataList(chunkMetadataList);
-        newDeviceMetadata.put(measurementId, timeseiresMetadata);
-      }
-    });
-    return newDeviceMetadata;
-  }
-
-  /**
-   * for old TsFile
-   * this function does not modify the position of the file reader.
-   */
-  private TsDeviceMetadataV1 readOldTsDeviceMetaData(TsDeviceMetadataIndexV1 index) 
-      throws IOException {
-    if (index == null) {
-      return null;
-    }
-    return TsDeviceMetadataV1.deserializeFrom(readDataFromOldFile(index.getOffset(), index.getLen()));
-  }
-
-  @Override
-  public TimeseriesMetadata readTimeseriesMetadata(Path path) throws IOException {
-    return getTimeseriesMetadataFromOldFile(path);
-  }
-
-  @Override
-  public List<TimeseriesMetadata> readTimeseriesMetadata(String device, Set<String> measurements)
-      throws IOException {
-    return getTimeseriesMetadataFromOldFile(device, measurements);
-  }
-
-  /**
-   *  for 0.9.x/v1 TsFile
-   */
-  private TimeseriesMetadata getTimeseriesMetadataFromOldFile(Path path) throws IOException {
-    Map<String, TimeseriesMetadata> deviceMetadata = 
-        constructDeviceMetadataFromOldFile(path.getDevice());
-    return deviceMetadata.get(path.getMeasurement());
-  }
-
-  /**
-   *  for 0.9.x/v1 TsFile
-   */
-  private List<TimeseriesMetadata> getTimeseriesMetadataFromOldFile(String device, Set<String> measurements)
-      throws IOException {
-    Map<String, TimeseriesMetadata> deviceMetadata = 
-        constructDeviceMetadataFromOldFile(device);
-    List<TimeseriesMetadata> resultTimeseriesMetadataList = new ArrayList<>();
-    for (Entry<String, TimeseriesMetadata> entry : deviceMetadata.entrySet()) {
-      if (measurements.contains(entry.getKey())) {
-        resultTimeseriesMetadataList.add(entry.getValue());
-      }
-    }
-    return resultTimeseriesMetadataList;
-  }
-
-  /**
-   * read the chunk's header.
-   *
-   * @param position the file offset of this chunk's header
-   * @param chunkHeaderSize the size of chunk's header
-   * @param markerRead true if the offset does not contains the marker , otherwise false
-   */
-  private ChunkHeader readChunkHeaderFromOldFile(long position, int chunkHeaderSize, boolean markerRead)
-      throws IOException {
-    return HeaderUtils.deserializeChunkHeaderV1(tsFileInput, position, chunkHeaderSize, markerRead);
-  }
-
-  /**
-   * notice, this function will modify channel's position.
-   *
-   * @param dataSize the size of chunkdata
-   * @param position the offset of the chunk data
-   * @return the pages of this chunk
-   */
-  private ByteBuffer readChunkFromOldFile(long position, int dataSize) throws IOException {
-    return readDataFromOldFile(position, dataSize);
-  }
-
-  /**
-   * read memory chunk.
-   *
-   * @param metaData -given chunk meta data
-   * @return -chunk
-   */
-  @Override
-  public Chunk readMemChunk(ChunkMetadata metaData) throws IOException {
-    int chunkHeadSize = HeaderUtils.getSerializedSizeV1(metaData.getMeasurementUid());
-    ChunkHeader header = readChunkHeaderFromOldFile(metaData.getOffsetOfChunkHeader(), chunkHeadSize, false);
-    ByteBuffer buffer = readChunkFromOldFile(metaData.getOffsetOfChunkHeader() + chunkHeadSize,
-        header.getDataSize());
-    return new Chunk(header, buffer, metaData.getDeleteIntervalList());
-  }
-
-  /**
-   * not thread safe.
-   *
-   * @param type given tsfile data type
-   */
-  @Override
-  public PageHeader readPageHeader(TSDataType type) throws IOException {
-    return HeaderUtils.deserializePageHeaderV1(tsFileInput.wrapAsInputStream(), type);
-  }
-
-  /**
-   * read data from tsFileInput, from the current position (if position = -1), or the given
-   * position. <br> if position = -1, the tsFileInput's position will be changed to the current
-   * position + real data size that been read. Other wise, the tsFileInput's position is not
-   * changed.
-   *
-   * @param position the start position of data in the tsFileInput, or the current position if
-   * position = -1
-   * @param size the size of data that want to read
-   * @return data that been read.
-   */
-  private ByteBuffer readDataFromOldFile(long position, int size) throws IOException {
-    ByteBuffer buffer = ByteBuffer.allocate(size);
-    if (position < 0) {
-      if (ReadWriteIOUtils.readAsPossible(tsFileInput, buffer) != size) {
-        throw new IOException("reach the end of the data");
-      }
-    } else {
-      if (ReadWriteIOUtils.readAsPossible(tsFileInput, buffer, position, size) != size) {
-        throw new IOException("reach the end of the data");
-      }
-    }
-    buffer.flip();
-    return buffer;
-  }
-
-  /**
-   * get ChunkMetaDatas of given path
-   *
-   * @param path timeseries path
-   * @return List of ChunkMetaData
-   */
-  @Override
-  public List<ChunkMetadata> getChunkMetadataList(Path path) throws IOException {
-    return getChunkMetadataListFromOldFile(path);
-  }
-
-  /**
-   *  For old TsFile
-   */
-  private List<ChunkMetadata> getChunkMetadataListFromOldFile(Path path) throws IOException {
-    readOldFileMetadata();
-    if (!oldTsFileMetaData.containsDevice(path.getDevice())) {
-      return new ArrayList<>();
-    }
-
-    // get the index information of TsDeviceMetadata
-    TsDeviceMetadataIndexV1 index = oldTsFileMetaData.getDeviceMetadataIndex(path.getDevice());
-
-    // read TsDeviceMetadata from file
-    TsDeviceMetadataV1 tsDeviceMetadata = readOldTsDeviceMetaData(index);
-    if (tsDeviceMetadata == null) {
-      return new ArrayList<>();
-    }
-
-    // get all ChunkMetaData of this path included in all ChunkGroups of this device
-    List<ChunkMetadataV1> oldChunkMetaDataList = new ArrayList<>();
-    for (ChunkGroupMetaDataV1 chunkGroupMetaData : tsDeviceMetadata.getChunkGroupMetaDataList()) {
-      List<ChunkMetadataV1> chunkMetaDataListInOneChunkGroup = chunkGroupMetaData
-          .getChunkMetaDataList();
-      for (ChunkMetadataV1 chunkMetaData : chunkMetaDataListInOneChunkGroup) {
-        if (path.getMeasurement().equals(chunkMetaData.getMeasurementUid())) {
-          chunkMetaData.setVersion(chunkGroupMetaData.getVersion());
-          oldChunkMetaDataList.add(chunkMetaData);
-        }
-      }
-    }
-    oldChunkMetaDataList.sort(Comparator.comparingLong(ChunkMetadataV1::getStartTime));
-    List<ChunkMetadata> chunkMetadataList = new ArrayList<>();
-    for (ChunkMetadataV1 oldChunkMetaData : oldChunkMetaDataList) {
-      chunkMetadataList.add(oldChunkMetaData.upgradeToChunkMetadata());
-    }
-    return chunkMetadataList;
-  }
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
index 01a5f80..107f026 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
@@ -78,6 +78,12 @@ public class ChunkWriterImpl implements IChunkWriter {
   private Statistics<?> statistics;
 
   /**
+   * first page info
+   */
+  private int sizeWithoutStatistic;
+  private Statistics<?> firstPageStatistics;
+
+  /**
    * @param schema schema of this measurement
    */
   public ChunkWriterImpl(MeasurementSchema schema) {
@@ -95,8 +101,6 @@ public class ChunkWriterImpl implements IChunkWriter {
     this.statistics = Statistics.getStatsByType(measurementSchema.getType());
 
     this.pageWriter = new PageWriter(measurementSchema);
-    this.pageWriter.setTimeEncoder(measurementSchema.getTimeEncoder());
-    this.pageWriter.setValueEncoder(measurementSchema.getValueEncoder());
   }
 
   @Override
@@ -172,8 +176,8 @@ public class ChunkWriterImpl implements IChunkWriter {
   }
 
   /**
-   * check occupied memory size, if it exceeds the PageSize threshold, construct a page and 
-   * put it to pageBuffer
+   * check occupied memory size, if it exceeds the PageSize threshold, construct a page and put it
+   * to pageBuffer
    */
   private void checkPageSizeAndMayOpenANewPage() {
     if (pageWriter.getPointNumber() == maxNumberOfPointsInPage) {
@@ -201,7 +205,19 @@ public class ChunkWriterImpl implements IChunkWriter {
 
   private void writePageToPageBuffer() {
     try {
-      pageWriter.writePageHeaderAndDataIntoBuff(pageBuffer);
+      int sizeWithoutStatistic = pageWriter
+          .writePageHeaderAndDataIntoBuff(pageBuffer, numOfPages == 0);
+      if (numOfPages == 0) { // record the firstPageStatistics
+        this.firstPageStatistics = pageWriter.getStatistics();
+        this.sizeWithoutStatistic = sizeWithoutStatistic;
+      } else if (numOfPages == 1) { // put the firstPageStatistics into pageBuffer
+        byte[] b = pageBuffer.toByteArray();
+        pageBuffer.reset();
+        pageBuffer.write(b, 0, sizeWithoutStatistic);
+        firstPageStatistics.serialize(pageBuffer);
+        pageBuffer.write(b, sizeWithoutStatistic, b.length - sizeWithoutStatistic);
+        firstPageStatistics = null;
+      }
 
       // update statistics of this chunk
       numOfPages++;
@@ -226,7 +242,9 @@ public class ChunkWriterImpl implements IChunkWriter {
 
   @Override
   public long estimateMaxSeriesMemSize() {
-    return pageWriter.estimateMaxMemSize() + this.estimateMaxPageMemSize();
+    return pageBuffer.size() + pageWriter.estimateMaxMemSize() + PageHeader
+        .estimateMaxPageHeaderSizeWithoutStatistics() + pageWriter.getStatistics()
+        .getSerializedSize();
   }
 
   @Override
@@ -235,7 +253,8 @@ public class ChunkWriterImpl implements IChunkWriter {
       return 0;
     }
     // return the serialized size of the chunk header + all pages
-    return ChunkHeader.getSerializedSize(measurementSchema.getMeasurementId()) + (long) pageBuffer.size();
+    return ChunkHeader.getSerializedSize(measurementSchema.getMeasurementId(), pageBuffer.size())
+        + (long) pageBuffer.size();
   }
 
   @Override
@@ -267,10 +286,12 @@ public class ChunkWriterImpl implements IChunkWriter {
 
     // write the page header to pageBuffer
     try {
-      logger.debug("start to flush a page header into buffer, buffer position {} ", pageBuffer.size());
+      logger.debug("start to flush a page header into buffer, buffer position {} ",
+          pageBuffer.size());
       header.serializeTo(pageBuffer);
-      logger.debug("finish to flush a page header {} of {} into buffer, buffer position {} ", header,
-          measurementSchema.getMeasurementId(), pageBuffer.size());
+      logger
+          .debug("finish to flush a page header {} of {} into buffer, buffer position {} ", header,
+              measurementSchema.getMeasurementId(), pageBuffer.size());
 
       statistics.mergeStatistics(header.getStatistics());
 
@@ -319,17 +340,5 @@ public class ChunkWriterImpl implements IChunkWriter {
     writer.endCurrentChunk();
   }
 
-  /**
-   * estimate max page memory size.
-   *
-   * @return the max possible allocated size currently
-   */
-  private long estimateMaxPageMemSize() {
-    // return the sum of size of buffer and page max size
-    return (long) (pageBuffer.size() +
-        PageHeader.calculatePageHeaderSizeWithoutStatistics() +
-        pageWriter.getStatistics().getSerializedSize());
-  }
-
 
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/page/PageWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/page/PageWriter.java
index ba8001a..63bd3a2 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/page/PageWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/page/PageWriter.java
@@ -22,13 +22,8 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.Channels;
 import java.nio.channels.WritableByteChannel;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.iotdb.tsfile.compress.ICompressor;
 import org.apache.iotdb.tsfile.encoding.encoder.Encoder;
-import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
@@ -36,6 +31,8 @@ import org.apache.iotdb.tsfile.utils.Binary;
 import org.apache.iotdb.tsfile.utils.PublicBAOS;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This writer is used to write time-value into a page. It consists of a time
@@ -233,29 +230,36 @@ public class PageWriter {
   /**
    * write the page header and data into the PageWriter's output stream.
    */
-  public void writePageHeaderAndDataIntoBuff(PublicBAOS pageBuffer) throws IOException {
+  public int writePageHeaderAndDataIntoBuff(PublicBAOS pageBuffer, boolean first) throws IOException {
     if (statistics.getCount() == 0) {
-      return;
+      return 0;
     }
 
     ByteBuffer pageData = getUncompressedBytes();
     int uncompressedSize = pageData.remaining();
     int compressedSize;
-    int compressedPosition = 0;
     byte[] compressedBytes = null;
 
     if (compressor.getType().equals(CompressionType.UNCOMPRESSED)) {
-      compressedSize = pageData.remaining();
+      compressedSize = uncompressedSize;
     } else {
+      // TODO maybe we can control this memory to avoid allocating new heap memory each time
       compressedBytes = new byte[compressor.getMaxBytesForCompression(uncompressedSize)];
-      compressedPosition = 0;
       // data is never a directByteBuffer now, so we can use data.array()
       compressedSize = compressor.compress(pageData.array(), pageData.position(), uncompressedSize, compressedBytes);
     }
 
+
     // write the page header to IOWriter
-    PageHeader header = new PageHeader(uncompressedSize, compressedSize, statistics);
-    header.serializeTo(pageBuffer);
+    int sizeWithoutStatistic = 0;
+    if (first) {
+      sizeWithoutStatistic += ReadWriteForEncodingUtils.writeUnsignedVarInt(uncompressedSize, pageBuffer);
+      sizeWithoutStatistic += ReadWriteForEncodingUtils.writeUnsignedVarInt(compressedSize, pageBuffer);
+    } else {
+      ReadWriteForEncodingUtils.writeUnsignedVarInt(uncompressedSize, pageBuffer);
+      ReadWriteForEncodingUtils.writeUnsignedVarInt(compressedSize, pageBuffer);
+      statistics.serialize(pageBuffer);
+    }
 
     // write page content to temp PBAOS
     logger.trace("start to flush a page data into buffer, buffer position {} ", pageBuffer.size());
@@ -264,9 +268,10 @@ public class PageWriter {
         channel.write(pageData);
       }
     } else {
-      pageBuffer.write(compressedBytes, compressedPosition, compressedSize);
+      pageBuffer.write(compressedBytes, 0, compressedSize);
     }
     logger.trace("start to flush a page data into buffer, buffer position {} ", pageBuffer.size());
+    return sizeWithoutStatistic;
   }
 
   /**
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/LocalTsFileOutput.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/LocalTsFileOutput.java
index 8423dfb..43ad692 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/LocalTsFileOutput.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/LocalTsFileOutput.java
@@ -52,6 +52,11 @@ public class LocalTsFileOutput implements TsFileOutput {
   }
 
   @Override
+  public void write(byte b) throws IOException {
+    bufferedStream.write(b);
+  }
+
+  @Override
   public void write(ByteBuffer b) throws IOException {
     bufferedStream.write(b.array());
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index 7ad89b2..40a9161 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -30,7 +30,7 @@ import java.util.TreeMap;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
+import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkGroupMetadata;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
@@ -60,14 +60,14 @@ import org.slf4j.LoggerFactory;
 public class TsFileIOWriter {
 
   public static final byte[] magicStringBytes;
-  public static final byte[] versionNumberBytes;
+  public static final byte versionNumberByte;
   protected static final TSFileConfig config = TSFileDescriptor.getInstance().getConfig();
   private static final Logger logger = LoggerFactory.getLogger(TsFileIOWriter.class);
   private static final Logger resourceLogger = LoggerFactory.getLogger("FileMonitor");
 
   static {
     magicStringBytes = BytesUtils.stringToBytes(TSFileConfig.MAGIC_STRING);
-    versionNumberBytes = TSFileConfig.VERSION_NUMBER.getBytes();
+    versionNumberByte = TSFileConfig.VERSION_NUMBER;
   }
 
   protected TsFileOutput out;
@@ -85,7 +85,6 @@ public class TsFileIOWriter {
 
   private long markedPosition;
   private String currentChunkGroupDeviceId;
-  private long currentChunkGroupStartOffset;
   protected List<Pair<Long, Long>> versionInfo = new ArrayList<>();
   
   // for upgrade tool
@@ -136,16 +135,17 @@ public class TsFileIOWriter {
 
   protected void startFile() throws IOException {
     out.write(magicStringBytes);
-    out.write(versionNumberBytes);
+    out.write(versionNumberByte);
   }
 
   public void startChunkGroup(String deviceId) throws IOException {
     this.currentChunkGroupDeviceId = deviceId;
-    currentChunkGroupStartOffset = out.getPosition();
     if (logger.isDebugEnabled()) {
       logger.debug("start chunk group:{}, file position {}", deviceId, out.getPosition());
     }
     chunkMetadataList = new ArrayList<>();
+    ChunkGroupHeader chunkGroupHeader = new ChunkGroupHeader(currentChunkGroupDeviceId);
+    chunkGroupHeader.serializeTo(out.wrapAsStream());
   }
 
   /**
@@ -155,12 +155,7 @@ public class TsFileIOWriter {
     if (currentChunkGroupDeviceId == null || chunkMetadataList.isEmpty()) {
       return;
     }
-    long dataSize = out.getPosition() - currentChunkGroupStartOffset;
-    ChunkGroupFooter chunkGroupFooter = new ChunkGroupFooter(currentChunkGroupDeviceId, dataSize,
-        chunkMetadataList.size());
-    chunkGroupFooter.serializeTo(out.wrapAsStream());
-    chunkGroupMetadataList
-        .add(new ChunkGroupMetadata(currentChunkGroupDeviceId, chunkMetadataList));
+    chunkGroupMetadataList.add(new ChunkGroupMetadata(currentChunkGroupDeviceId, chunkMetadataList));
     currentChunkGroupDeviceId = null;
     chunkMetadataList = null;
     out.flush();
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileOutput.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileOutput.java
index 77ce3e2..e8de8ca 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileOutput.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileOutput.java
@@ -34,6 +34,14 @@ public interface TsFileOutput {
   void write(byte[] b) throws IOException;
 
   /**
+   * Writes 1 byte to this output at the current position.
+   *
+   * @param b the data.
+   * @throws IOException if an I/O error occurs.
+   */
+  void write(byte b) throws IOException;
+
+  /**
    * Writes <code>b.remaining()</code> bytes from the specified byte array to this
    * output at the current position.
    *
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
index f8c80ec..97a7a33 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
@@ -30,7 +30,7 @@ import java.util.Set;
 import java.util.stream.Collectors;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
+import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
@@ -64,7 +64,7 @@ public class TsFileSequenceReaderTest {
   @Test
   public void testReadTsFileSequently() throws IOException {
     TsFileSequenceReader reader = new TsFileSequenceReader(FILE_PATH);
-    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER
+    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
         .getBytes().length);
     Map<String, List<Pair<Long, Long>>> deviceChunkGroupMetadataOffsets = new HashMap<>();
 
@@ -73,14 +73,15 @@ public class TsFileSequenceReaderTest {
     while ((marker = reader.readMarker()) != MetaMarker.SEPARATOR) {
       switch (marker) {
         case MetaMarker.CHUNK_HEADER:
-          ChunkHeader header = reader.readChunkHeader();
+        case MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER:
+          ChunkHeader header = reader.readChunkHeader(marker);
           for (int j = 0; j < header.getNumOfPages(); j++) {
             PageHeader pageHeader = reader.readPageHeader(header.getDataType());
             reader.readPage(pageHeader, header.getCompressionType());
           }
           break;
-        case MetaMarker.CHUNK_GROUP_FOOTER:
-          ChunkGroupFooter footer = reader.readChunkGroupFooter();
+        case MetaMarker.CHUNK_GROUP_HEADER:
+          ChunkGroupHeader footer = reader.readChunkGroupFooter();
           long endOffset = reader.position();
           Pair<Long, Long> pair = new Pair<>(startOffset, endOffset);
           deviceChunkGroupMetadataOffsets.putIfAbsent(footer.getDeviceID(), new ArrayList<>());
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
index 61e4786..a5bb8e6 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.constant.TestConstant;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
+import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.metadata.TimeSeriesMetadataTest;
 import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
@@ -84,20 +84,21 @@ public class TsFileIOWriterTest {
 
     // magic_string
     Assert.assertEquals(TSFileConfig.MAGIC_STRING, reader.readHeadMagic());
-    Assert.assertEquals(TSFileConfig.VERSION_NUMBER, reader.readVersionNumber());
+    Assert.assertEquals(TSFileConfig.VERSION_NUMBER_V2, reader.readVersionNumber());
     Assert.assertEquals(TSFileConfig.MAGIC_STRING, reader.readTailMagic());
 
+    // chunk group header
+    Assert.assertEquals(MetaMarker.CHUNK_GROUP_HEADER, reader.readMarker());
+    ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupFooter();
+    Assert.assertEquals(deviceId, chunkGroupHeader.getDeviceID());
+
     // chunk header
-    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER
+    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
         .getBytes().length);
-    Assert.assertEquals(MetaMarker.CHUNK_HEADER, reader.readMarker());
-    ChunkHeader header = reader.readChunkHeader();
+    Assert.assertEquals(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER, reader.readMarker());
+    ChunkHeader header = reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
     Assert.assertEquals(TimeSeriesMetadataTest.measurementUID, header.getMeasurementID());
 
-    // chunk group footer
-    Assert.assertEquals(MetaMarker.CHUNK_GROUP_FOOTER, reader.readMarker());
-    ChunkGroupFooter footer = reader.readChunkGroupFooter();
-    Assert.assertEquals(deviceId, footer.getDeviceID());
 
     // separator
     Assert.assertEquals(MetaMarker.VERSION, reader.readMarker());
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriterTest.java
index 87b88fc..68c46dc 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriterTest.java
@@ -85,7 +85,7 @@ public class RestorableTsFileIOWriterTest {
     RestorableTsFileIOWriter rWriter = new RestorableTsFileIOWriter(file);
     writer = new TsFileWriter(rWriter);
     writer.close();
-    assertEquals(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER
+    assertEquals(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
         .getBytes().length, rWriter.getTruncatedSize());
 
     rWriter = new RestorableTsFileIOWriter(file);
@@ -105,7 +105,7 @@ public class RestorableTsFileIOWriterTest {
     RestorableTsFileIOWriter rWriter = new RestorableTsFileIOWriter(file);
     writer = new TsFileWriter(rWriter);
     writer.close();
-    assertEquals(TsFileIOWriter.magicStringBytes.length + TsFileIOWriter.versionNumberBytes.length,
+    assertEquals(TsFileIOWriter.magicStringBytes.length + 1,
         rWriter.getTruncatedSize());
     assertTrue(file.delete());
   }
@@ -119,7 +119,7 @@ public class RestorableTsFileIOWriterTest {
     RestorableTsFileIOWriter rWriter = new RestorableTsFileIOWriter(file);
     TsFileWriter writer = new TsFileWriter(rWriter);
     writer.close();
-    assertEquals(TsFileIOWriter.magicStringBytes.length + TsFileIOWriter.versionNumberBytes.length,
+    assertEquals(TsFileIOWriter.magicStringBytes.length + 1,
         rWriter.getTruncatedSize());
     assertTrue(file.delete());
   }
@@ -137,7 +137,7 @@ public class RestorableTsFileIOWriterTest {
     RestorableTsFileIOWriter rWriter = new RestorableTsFileIOWriter(file);
     writer = new TsFileWriter(rWriter);
     writer.close();
-    assertEquals(TsFileIOWriter.magicStringBytes.length + TsFileIOWriter.versionNumberBytes.length,
+    assertEquals(TsFileIOWriter.magicStringBytes.length + 1,
         rWriter.getTruncatedSize());
     assertTrue(file.delete());
   }


[iotdb] 08/11: some changes

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 57bc1b69098b5759ec4d27c996db5bca4b8585d8
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Fri Nov 27 20:32:05 2020 +0800

    some changes
---
 .../apache/iotdb/db/engine/cache/ChunkCache.java   |  5 +--
 .../iotdb/db/query/control/FileReaderManager.java  |  2 +-
 .../org/apache/iotdb/db/utils/FileLoaderUtils.java |  9 +++---
 .../org/apache/iotdb/db/utils/UpgradeUtils.java    |  2 +-
 .../iotdb/tsfile/file/header/ChunkHeader.java      |  4 +++
 .../iotdb/tsfile/file/header/PageHeader.java       |  6 ++++
 .../iotdb/tsfile/read/TsFileSequenceReader.java    | 17 +++++-----
 .../org/apache/iotdb/tsfile/read/common/Chunk.java |  9 +++++-
 .../read/controller/CachedChunkLoaderImpl.java     |  3 +-
 .../tsfile/read/reader/chunk/ChunkReader.java      | 37 ++++++++++++----------
 .../iotdb/tsfile/utils/ReadWriteIOUtils.java       |  2 +-
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java  |  3 +-
 12 files changed, 61 insertions(+), 38 deletions(-)

diff --git a/server/src/main/java/org/apache/iotdb/db/engine/cache/ChunkCache.java b/server/src/main/java/org/apache/iotdb/db/engine/cache/ChunkCache.java
index 6fccb8b..c621a26 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/cache/ChunkCache.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/cache/ChunkCache.java
@@ -87,7 +87,7 @@ public class ChunkCache {
     if (!CACHE_ENABLE) {
       Chunk chunk = reader.readMemChunk(chunkMetaData);
       return new Chunk(chunk.getHeader(), chunk.getData().duplicate(),
-          chunk.getDeleteIntervalList());
+          chunk.getDeleteIntervalList(), chunkMetaData.getStatistics());
     }
 
     cacheRequestNum.incrementAndGet();
@@ -121,7 +121,8 @@ public class ChunkCache {
     if (config.isDebugOn()) {
       DEBUG_LOGGER.info("get chunk from cache whose meta data is: " + chunkMetaData);
     }
-    return new Chunk(chunk.getHeader(), chunk.getData().duplicate(), chunk.getDeleteIntervalList());
+    return new Chunk(chunk.getHeader(), chunk.getData().duplicate(), chunk.getDeleteIntervalList(),
+        chunkMetaData.getStatistics());
   }
 
   private void printCacheLog(boolean isHit) {
diff --git a/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java b/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
index 9a9eb18..4e99292 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
@@ -167,7 +167,7 @@ public class FileReaderManager implements IService {
       else {
         tsFileReader = new TsFileSequenceReader(filePath);
         switch (tsFileReader.readVersionNumber()) {
-          case TSFileConfig.VERSION_NUMBER_V2:
+          case TSFileConfig.VERSION_NUMBER:
             break;
           default:
             throw new IOException("The version of this TsFile is not corrent. ");
diff --git a/server/src/main/java/org/apache/iotdb/db/utils/FileLoaderUtils.java b/server/src/main/java/org/apache/iotdb/db/utils/FileLoaderUtils.java
index b3af734..a42d80f 100644
--- a/server/src/main/java/org/apache/iotdb/db/utils/FileLoaderUtils.java
+++ b/server/src/main/java/org/apache/iotdb/db/utils/FileLoaderUtils.java
@@ -80,12 +80,13 @@ public class FileLoaderUtils {
   }
 
   /**
-   * @param resource TsFile
+   * @param resource   TsFile
    * @param seriesPath Timeseries path
    * @param allSensors measurements queried at the same time of this device
-   * @param filter any filter, only used to check time range
+   * @param filter     any filter, only used to check time range
    */
-  public static TimeseriesMetadata loadTimeSeriesMetadata(TsFileResource resource, PartialPath seriesPath,
+  public static TimeseriesMetadata loadTimeSeriesMetadata(TsFileResource resource,
+      PartialPath seriesPath,
       QueryContext context, Filter filter, Set<String> allSensors) throws IOException {
     TimeseriesMetadata timeSeriesMetadata;
     if (resource.isClosed()) {
@@ -138,7 +139,7 @@ public class FileLoaderUtils {
    * load all page readers in one chunk that satisfying the timeFilter
    *
    * @param chunkMetaData the corresponding chunk metadata
-   * @param timeFilter it should be a TimeFilter instead of a ValueFilter
+   * @param timeFilter    it should be a TimeFilter instead of a ValueFilter
    */
   public static List<IPageReader> loadPageReaderList(ChunkMetadata chunkMetaData, Filter timeFilter)
       throws IOException {
diff --git a/server/src/main/java/org/apache/iotdb/db/utils/UpgradeUtils.java b/server/src/main/java/org/apache/iotdb/db/utils/UpgradeUtils.java
index c213f64..1856157 100644
--- a/server/src/main/java/org/apache/iotdb/db/utils/UpgradeUtils.java
+++ b/server/src/main/java/org/apache/iotdb/db/utils/UpgradeUtils.java
@@ -68,7 +68,7 @@ public class UpgradeUtils {
     }
     try (TsFileSequenceReader tsFileSequenceReader = new TsFileSequenceReader(
         tsFileResource.getTsFile().getAbsolutePath())) {
-      if (tsFileSequenceReader.readVersionNumber().equals(TSFileConfig.VERSION_NUMBER_V1)) {
+      if (tsFileSequenceReader.readVersionNumber() == TSFileConfig.VERSION_NUMBER_V2.getBytes()[0]) {
         return true;
       }
     } catch (Exception e) {
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
index 76c1813..62267e6 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
@@ -223,4 +223,8 @@ public class ChunkHeader {
     this.dataSize += chunkHeader.getDataSize();
     this.numOfPages += chunkHeader.getNumOfPages();
   }
+
+  public byte getChunkType() {
+    return chunkType;
+  }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
index 0b3e4cd..2c0acf9 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/PageHeader.java
@@ -62,6 +62,12 @@ public class PageHeader {
     return new PageHeader(uncompressedSize, compressedSize, statistics);
   }
 
+  public static PageHeader deserializeFrom(ByteBuffer buffer, Statistics chunkStatistic) {
+    int uncompressedSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
+    int compressedSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
+    return new PageHeader(uncompressedSize, compressedSize, chunkStatistic);
+  }
+
   public int getUncompressedSize() {
     return uncompressedSize;
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index a966ba4..5c7338d 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -226,12 +226,11 @@ public class TsFileSequenceReader implements AutoCloseable {
   /**
    * this function reads version number and checks compatibility of TsFile.
    */
-  public String readVersionNumber() throws IOException {
-    ByteBuffer versionNumberBytes = ByteBuffer
-        .allocate(TSFileConfig.VERSION_NUMBER_V2.getBytes().length);
-    tsFileInput.read(versionNumberBytes, TSFileConfig.MAGIC_STRING.getBytes().length);
-    versionNumberBytes.flip();
-    return new String(versionNumberBytes.array());
+  public byte readVersionNumber() throws IOException {
+    ByteBuffer versionNumberByte = ByteBuffer.allocate(Byte.BYTES);
+    tsFileInput.read(versionNumberByte, TSFileConfig.MAGIC_STRING.getBytes().length);
+    versionNumberByte.flip();
+    return versionNumberByte.get();
   }
 
   /**
@@ -728,7 +727,7 @@ public class TsFileSequenceReader implements AutoCloseable {
     ChunkHeader header = readChunkHeader(metaData.getOffsetOfChunkHeader(), chunkHeadSize);
     ByteBuffer buffer = readChunk(metaData.getOffsetOfChunkHeader() + header.getSerializedSize(),
         header.getDataSize());
-    return new Chunk(header, buffer, metaData.getDeleteIntervalList());
+    return new Chunk(header, buffer, metaData.getDeleteIntervalList(), metaData.getStatistics());
   }
 
   /**
@@ -911,8 +910,8 @@ public class TsFileSequenceReader implements AutoCloseable {
     if (fileSize < headerLength) {
       return TsFileCheckStatus.INCOMPATIBLE_FILE;
     }
-    if (!TSFileConfig.MAGIC_STRING.equals(readHeadMagic()) || !TSFileConfig.VERSION_NUMBER_V2
-        .equals(readVersionNumber())) {
+    if (!TSFileConfig.MAGIC_STRING.equals(readHeadMagic()) || !(TSFileConfig.VERSION_NUMBER
+        == readVersionNumber())) {
       return TsFileCheckStatus.INCOMPATIBLE_FILE;
     }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/Chunk.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/Chunk.java
index 1968aa9..a7bbadb 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/Chunk.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/Chunk.java
@@ -23,6 +23,7 @@ import java.nio.ByteBuffer;
 import java.util.List;
 import org.apache.iotdb.tsfile.common.cache.Accountable;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
+import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
 
 /**
  * used in query.
@@ -30,6 +31,7 @@ import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 public class Chunk implements Accountable {
 
   private ChunkHeader chunkHeader;
+  private Statistics chunkStatistic;
   private ByteBuffer chunkData;
   /**
    * A list of deleted intervals.
@@ -38,10 +40,11 @@ public class Chunk implements Accountable {
 
   private long ramSize;
 
-  public Chunk(ChunkHeader header, ByteBuffer buffer, List<TimeRange> deleteIntervalList) {
+  public Chunk(ChunkHeader header, ByteBuffer buffer, List<TimeRange> deleteIntervalList, Statistics chunkStatistic) {
     this.chunkHeader = header;
     this.chunkData = buffer;
     this.deleteIntervalList = deleteIntervalList;
+    this.chunkStatistic = chunkStatistic;
   }
 
   public ChunkHeader getHeader() {
@@ -78,4 +81,8 @@ public class Chunk implements Accountable {
   public long getRamSize() {
     return ramSize;
   }
+
+  public Statistics getChunkStatistic() {
+    return chunkStatistic;
+  }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/CachedChunkLoaderImpl.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/CachedChunkLoaderImpl.java
index 9c47e70..deb51b5 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/CachedChunkLoaderImpl.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/CachedChunkLoaderImpl.java
@@ -61,7 +61,8 @@ public class CachedChunkLoaderImpl implements IChunkLoader {
   @Override
   public Chunk loadChunk(ChunkMetadata chunkMetaData) throws IOException {
     Chunk chunk = chunkCache.get(chunkMetaData);
-    return new Chunk(chunk.getHeader(), chunk.getData().duplicate(), chunk.getDeleteIntervalList());
+    return new Chunk(chunk.getHeader(), chunk.getData().duplicate(), chunk.getDeleteIntervalList(),
+        chunkMetaData.getStatistics());
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
index f747926..729b8f7 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
@@ -19,26 +19,27 @@
 
 package org.apache.iotdb.tsfile.read.reader.chunk;
 
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.LinkedList;
+import java.util.List;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.compress.IUnCompressor;
 import org.apache.iotdb.tsfile.encoding.decoder.Decoder;
+import org.apache.iotdb.tsfile.file.MetaMarker;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
+import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
 import org.apache.iotdb.tsfile.read.common.BatchData;
 import org.apache.iotdb.tsfile.read.common.Chunk;
 import org.apache.iotdb.tsfile.read.common.TimeRange;
-import org.apache.iotdb.tsfile.read.reader.IPageReader;
 import org.apache.iotdb.tsfile.read.filter.basic.Filter;
 import org.apache.iotdb.tsfile.read.reader.IChunkReader;
+import org.apache.iotdb.tsfile.read.reader.IPageReader;
 import org.apache.iotdb.tsfile.read.reader.page.PageReader;
 
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.LinkedList;
-import java.util.List;
-
 public class ChunkReader implements IChunkReader {
 
   private ChunkHeader chunkHeader;
@@ -51,7 +52,7 @@ public class ChunkReader implements IChunkReader {
   protected Filter filter;
 
   private List<IPageReader> pageReaderList = new LinkedList<>();
-  
+
   private boolean isFromOldTsFile = false;
 
   /**
@@ -72,11 +73,11 @@ public class ChunkReader implements IChunkReader {
     chunkHeader = chunk.getHeader();
     this.unCompressor = IUnCompressor.getUnCompressor(chunkHeader.getCompressionType());
 
-
-    initAllPageReaders();
+    initAllPageReaders(chunk.getChunkStatistic());
   }
 
-  public ChunkReader(Chunk chunk, Filter filter, boolean isFromOldFile) throws IOException {
+  public ChunkReader(Chunk chunk, Filter filter, boolean isFromOldFile)
+      throws IOException {
     this.filter = filter;
     this.chunkDataBuffer = chunk.getData();
     this.deleteIntervalList = chunk.getDeleteIntervalList();
@@ -84,14 +85,19 @@ public class ChunkReader implements IChunkReader {
     this.unCompressor = IUnCompressor.getUnCompressor(chunkHeader.getCompressionType());
     this.isFromOldTsFile = isFromOldFile;
 
-    initAllPageReaders();
+    initAllPageReaders(chunk.getChunkStatistic());
   }
 
-  private void initAllPageReaders() throws IOException {
+  private void initAllPageReaders(Statistics chunkStatistic) throws IOException {
     // construct next satisfied page header
     while (chunkDataBuffer.remaining() > 0) {
       // deserialize a PageHeader from chunkDataBuffer
-      PageHeader pageHeader = PageHeader.deserializeFrom(chunkDataBuffer, chunkHeader.getDataType());
+      PageHeader pageHeader;
+      if (chunkHeader.getChunkType() == MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER) {
+        pageHeader = PageHeader.deserializeFrom(chunkDataBuffer, chunkStatistic);
+      } else {
+        pageHeader = PageHeader.deserializeFrom(chunkDataBuffer, chunkHeader.getDataType());
+      }
       // if the current page satisfies
       if (pageSatisfied(pageHeader)) {
         pageReaderList.add(constructPageReaderForNextPage(pageHeader));
@@ -102,7 +108,6 @@ public class ChunkReader implements IChunkReader {
   }
 
 
-
   /**
    * judge if has next page whose page header satisfies the filter.
    */
@@ -156,9 +161,9 @@ public class ChunkReader implements IChunkReader {
 
     chunkDataBuffer.get(compressedPageBody);
     Decoder valueDecoder = Decoder
-            .getDecoderByType(chunkHeader.getEncodingType(), chunkHeader.getDataType());
+        .getDecoderByType(chunkHeader.getEncodingType(), chunkHeader.getDataType());
     byte[] uncompressedPageData = new byte[pageHeader.getUncompressedSize()];
-    unCompressor.uncompress(compressedPageBody,0, compressedPageBodyLength,
+    unCompressor.uncompress(compressedPageBody, 0, compressedPageBodyLength,
         uncompressedPageData, 0);
     ByteBuffer pageData = ByteBuffer.wrap(uncompressedPageData);
     PageReader reader = new PageReader(pageHeader, pageData, chunkHeader.getDataType(),
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
index 31eb8c7..ad251e5 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
@@ -660,7 +660,7 @@ public class ReadWriteIOUtils {
    * string length's type is varInt
    */
   public static String readVarIntString(ByteBuffer buffer) {
-    int strLength = readInt(buffer);
+    int strLength = ReadWriteForEncodingUtils.readVarInt(buffer);
     if (strLength < 0) {
       return null;
     } else if (strLength == 0) {
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index 292e620..c7eefe0 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -48,7 +48,6 @@ import org.apache.iotdb.tsfile.read.common.Path;
 import org.apache.iotdb.tsfile.utils.BytesUtils;
 import org.apache.iotdb.tsfile.utils.Pair;
 import org.apache.iotdb.tsfile.utils.PublicBAOS;
-import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 import org.apache.iotdb.tsfile.utils.VersionUtils;
 import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
@@ -253,7 +252,7 @@ public class TsFileIOWriter {
     }
 
     // write TsFileMetaData size
-    ReadWriteForEncodingUtils.writeUnsignedVarInt(size, out.wrapAsStream());// write the size of the file metadata.
+    ReadWriteIOUtils.write(size, out.wrapAsStream());// write the size of the file metadata.
 
     // write magic string
     out.write(magicStringBytes);


[iotdb] 10/11: fuck bug day

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit d5c34e8d2e378513e765779968c3b15964e9a32f
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Tue Dec 1 21:05:43 2020 +0800

    fuck bug day
---
 .../main/java/org/apache/iotdb/JDBCExample.java    | 26 ++++++----
 .../apache/iotdb/tsfile/TsFileSequenceRead.java    |  3 +-
 .../engine/merge/MaxFileMergeFileSelectorTest.java |  4 +-
 .../merge/MaxSeriesMergeFileSelectorTest.java      |  8 ++--
 .../tsfile/encoding/encoder/PlainEncoder.java      |  6 ++-
 .../iotdb/tsfile/file/header/ChunkHeader.java      | 18 +++++--
 .../metadata/statistics/BooleanStatistics.java     |  5 +-
 .../org/apache/iotdb/tsfile/read/common/Chunk.java | 56 +++++++++++++++++++---
 .../tsfile/read/reader/chunk/ChunkReader.java      | 13 ++++-
 .../iotdb/tsfile/write/chunk/ChunkWriterImpl.java  |  2 +
 10 files changed, 107 insertions(+), 34 deletions(-)

diff --git a/example/jdbc/src/main/java/org/apache/iotdb/JDBCExample.java b/example/jdbc/src/main/java/org/apache/iotdb/JDBCExample.java
index 00f1084..e8c05ce 100644
--- a/example/jdbc/src/main/java/org/apache/iotdb/JDBCExample.java
+++ b/example/jdbc/src/main/java/org/apache/iotdb/JDBCExample.java
@@ -28,21 +28,28 @@ import java.sql.SQLException;
 import java.sql.Statement;
 
 public class JDBCExample {
+
   public static void main(String[] args) throws ClassNotFoundException, SQLException {
     Class.forName("org.apache.iotdb.jdbc.IoTDBDriver");
-    try (Connection connection = DriverManager.getConnection("jdbc:iotdb://127.0.0.1:6667/", "root", "root");
-      Statement statement = connection.createStatement()) {
+    try (Connection connection = DriverManager
+        .getConnection("jdbc:iotdb://127.0.0.1:6667/", "root", "root");
+        Statement statement = connection.createStatement()) {
       try {
         statement.execute("SET STORAGE GROUP TO root.sg1");
-        statement.execute("CREATE TIMESERIES root.sg1.d1.s1 WITH DATATYPE=INT64, ENCODING=RLE, COMPRESSOR=SNAPPY");
-        statement.execute("CREATE TIMESERIES root.sg1.d1.s2 WITH DATATYPE=INT64, ENCODING=RLE, COMPRESSOR=SNAPPY");
-        statement.execute("CREATE TIMESERIES root.sg1.d1.s3 WITH DATATYPE=INT64, ENCODING=RLE, COMPRESSOR=SNAPPY");
+        statement.execute(
+            "CREATE TIMESERIES root.sg1.d1.s1 WITH DATATYPE=INT64, ENCODING=RLE, COMPRESSOR=SNAPPY");
+        statement.execute(
+            "CREATE TIMESERIES root.sg1.d1.s2 WITH DATATYPE=INT64, ENCODING=RLE, COMPRESSOR=SNAPPY");
+        statement.execute(
+            "CREATE TIMESERIES root.sg1.d1.s3 WITH DATATYPE=INT64, ENCODING=RLE, COMPRESSOR=SNAPPY");
       } catch (IoTDBSQLException e) {
         System.out.println(e.getMessage());
       }
 
       for (int i = 0; i <= 100; i++) {
-        statement.addBatch("insert into root.sg1.d1(timestamp, s1, s2, s3) values("+ i + "," + 1 + "," + 1 + "," + 1 + ")");
+        statement.addBatch(
+            "insert into root.sg1.d1(timestamp, s1, s2, s3) values(" + i + "," + 1 + "," + 1 + ","
+                + 1 + ")");
       }
       statement.executeBatch();
       statement.clearBatch();
@@ -51,10 +58,11 @@ public class JDBCExample {
       outputResult(resultSet);
       resultSet = statement.executeQuery("select count(*) from root");
       outputResult(resultSet);
-      resultSet = statement.executeQuery("select count(*) from root where time >= 1 and time <= 100 group by ([0, 100), 20ms, 20ms)");
+      resultSet = statement.executeQuery(
+          "select count(*) from root where time >= 1 and time <= 100 group by ([0, 100), 20ms, 20ms)");
       outputResult(resultSet);
-    } catch (IoTDBSQLException e){
-        System.out.println(e.getMessage());
+    } catch (IoTDBSQLException e) {
+      System.out.println(e.getMessage());
     }
   }
 
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
index 93ed9f0..a3ccda0 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
@@ -41,7 +41,7 @@ public class TsFileSequenceRead {
 
   @SuppressWarnings("squid:S3776") // Suppress high Cognitive Complexity warning
   public static void main(String[] args) throws IOException {
-    String filename = "/Users/jackietien/Desktop/1-1-1-after.tsfile";
+    String filename = "test.tsfile";
     if (args.length >= 1) {
       filename = args[0];
     }
@@ -65,6 +65,7 @@ public class TsFileSequenceRead {
           case MetaMarker.CHUNK_HEADER:
           case MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER:
             System.out.println("\t[Chunk]");
+            System.out.println("\tchunk type: " + marker);
             System.out.println("\tposition: " + reader.position());
             ChunkHeader header = reader.readChunkHeader(marker);
             System.out.println("\tMeasurement: " + header.getMeasurementID());
diff --git a/server/src/test/java/org/apache/iotdb/db/engine/merge/MaxFileMergeFileSelectorTest.java b/server/src/test/java/org/apache/iotdb/db/engine/merge/MaxFileMergeFileSelectorTest.java
index 1e50590..7036f75 100644
--- a/server/src/test/java/org/apache/iotdb/db/engine/merge/MaxFileMergeFileSelectorTest.java
+++ b/server/src/test/java/org/apache/iotdb/db/engine/merge/MaxFileMergeFileSelectorTest.java
@@ -78,8 +78,8 @@ public class MaxFileMergeFileSelectorTest extends MergeTest {
     List[] result = mergeFileSelector.select();
     List<TsFileResource> seqSelected = result[0];
     List<TsFileResource> unseqSelected = result[1];
-    assertEquals(seqResources.subList(0, 3), seqSelected);
-    assertEquals(unseqResources.subList(0, 3), unseqSelected);
+    assertEquals(seqResources.subList(0, 4), seqSelected);
+    assertEquals(unseqResources.subList(0, 4), unseqSelected);
     resource.clear();
   }
 }
diff --git a/server/src/test/java/org/apache/iotdb/db/engine/merge/MaxSeriesMergeFileSelectorTest.java b/server/src/test/java/org/apache/iotdb/db/engine/merge/MaxSeriesMergeFileSelectorTest.java
index 2a876d1..2e89ee5 100644
--- a/server/src/test/java/org/apache/iotdb/db/engine/merge/MaxSeriesMergeFileSelectorTest.java
+++ b/server/src/test/java/org/apache/iotdb/db/engine/merge/MaxSeriesMergeFileSelectorTest.java
@@ -85,8 +85,8 @@ public class MaxSeriesMergeFileSelectorTest extends MergeTest {
     List[] result = mergeFileSelector.select();
     List<TsFileResource> seqSelected = result[0];
     List<TsFileResource> unseqSelected = result[1];
-    assertEquals(seqResources.subList(0, 3), seqSelected);
-    assertEquals(unseqResources.subList(0, 3), unseqSelected);
+    assertEquals(seqResources.subList(0, 4), seqSelected);
+    assertEquals(unseqResources.subList(0, 4), unseqSelected);
     assertEquals(MaxSeriesMergeFileSelector.MAX_SERIES_NUM,
         mergeFileSelector.getConcurrentMergeNum());
     resource.clear();
@@ -100,8 +100,8 @@ public class MaxSeriesMergeFileSelectorTest extends MergeTest {
     List[] result = mergeFileSelector.select();
     List<TsFileResource> seqSelected = result[0];
     List<TsFileResource> unseqSelected = result[1];
-    assertEquals(seqResources.subList(0, 1), seqSelected);
-    assertEquals(unseqResources.subList(0, 1), unseqSelected);
+    assertEquals(seqResources.subList(0, 2), seqSelected);
+    assertEquals(unseqResources.subList(0, 2), unseqSelected);
     resource.clear();
   }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
index 50bff74..5fe4ccd 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
@@ -72,7 +72,11 @@ public class PlainEncoder extends Encoder {
 
   @Override
   public void encode(float value, ByteArrayOutputStream out) {
-    encode(Float.floatToIntBits(value), out);
+    int floatInt = Float.floatToIntBits(value);
+    out.write((floatInt >> 24) & 0xFF);
+    out.write((floatInt >> 16) & 0xFF);
+    out.write((floatInt >> 8) & 0xFF);
+    out.write(floatInt & 0xFF);
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
index 04942d8..082a4c7 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
@@ -35,6 +35,11 @@ import java.nio.ByteBuffer;
 public class ChunkHeader {
 
 
+  /**
+   * 1 means this chunk has more than one page, so each page has its own page statistic.
+   * 4 means this chunk has only one page, and this page has no page statistic.
+   */
+  private byte chunkType;
   private String measurementID;
   private int dataSize;
   private TSDataType dataType;
@@ -42,11 +47,6 @@ public class ChunkHeader {
   private TSEncoding encodingType;
 
   // the following fields do not need to be serialized.
-  /**
-   * 1 means this chunk has more than one page, so each page has its own page statistic 4 means this
-   * chunk has only one page, and this page has no page statistic
-   */
-  private byte chunkType;
   private int numOfPages;
   private int serializedSize;
 
@@ -226,10 +226,18 @@ public class ChunkHeader {
     this.numOfPages += chunkHeader.getNumOfPages();
   }
 
+  public void setDataSize(int dataSize) {
+    this.dataSize = dataSize;
+  }
+
   public byte getChunkType() {
     return chunkType;
   }
 
+  public void setChunkType(byte chunkType) {
+    this.chunkType = chunkType;
+  }
+
   public void increasePageNums(int i) {
     numOfPages += i;
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
index ddbdf34..201052f 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
@@ -220,10 +220,9 @@ public class BooleanStatistics extends Statistics<Boolean> {
 
   @Override
   public String toString() {
-    return "BooleanStatistics{" +
-        "firstValue=" + firstValue +
+    return super.toString() + " [firstValue=" + firstValue +
         ", lastValue=" + lastValue +
         ", sumValue=" + sumValue +
-        '}';
+        ']';
   }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/Chunk.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/Chunk.java
index a7bbadb..58d15f8 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/Chunk.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/common/Chunk.java
@@ -18,12 +18,15 @@
  */
 package org.apache.iotdb.tsfile.read.common;
 
+import java.io.IOException;
 import java.nio.ByteBuffer;
-
 import java.util.List;
 import org.apache.iotdb.tsfile.common.cache.Accountable;
+import org.apache.iotdb.tsfile.file.MetaMarker;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
+import org.apache.iotdb.tsfile.utils.PublicBAOS;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 
 /**
  * used in query.
@@ -63,12 +66,51 @@ public class Chunk implements Accountable {
     this.deleteIntervalList = list;
   }
 
-  public void mergeChunk(Chunk chunk) {
-    chunkHeader.mergeChunkHeader(chunk.chunkHeader);
-    ByteBuffer newChunkData = ByteBuffer
-        .allocate(chunkData.array().length + chunk.chunkData.array().length);
-    newChunkData.put(chunkData.array());
-    newChunkData.put(chunk.chunkData.array());
+  public void mergeChunk(Chunk chunk) throws IOException {
+    int dataSize = 0;
+    int offset1 = -1;
+    if (chunk.chunkHeader.getChunkType() == MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER) {
+      ReadWriteForEncodingUtils.readUnsignedVarInt(chunk.chunkData);
+      ReadWriteForEncodingUtils.readUnsignedVarInt(chunk.chunkData);
+      offset1 = chunk.chunkData.position();
+      chunk.chunkData.flip();
+      dataSize += (chunk.chunkData.array().length + chunk.chunkStatistic.getSerializedSize());
+    } else {
+      dataSize += chunk.chunkData.array().length;
+    }
+    int offset2 = -1;
+    if (chunkHeader.getChunkType() == MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER) {
+      chunkHeader.setChunkType(MetaMarker.CHUNK_HEADER);
+      ReadWriteForEncodingUtils.readUnsignedVarInt(chunkData);
+      ReadWriteForEncodingUtils.readUnsignedVarInt(chunkData);
+      offset2 = chunkData.position();
+      chunkData.flip();
+      dataSize += (chunkData.array().length + chunkStatistic.getSerializedSize());
+    } else {
+      dataSize += chunkData.array().length;
+    }
+    chunkHeader.setDataSize(dataSize);
+    ByteBuffer newChunkData = ByteBuffer.allocate(dataSize);
+    if (offset2 == -1) {
+      newChunkData.put(chunkData.array());
+    } else {
+      byte[] b = chunkData.array();
+      newChunkData.put(b, 0, offset2);
+      PublicBAOS a = new PublicBAOS();
+      chunkStatistic.serialize(a);
+      newChunkData.put(a.getBuf(), 0, a.size());
+      newChunkData.put(b, offset2, b.length - offset2);
+    }
+    if (offset1 == -1) {
+      newChunkData.put(chunk.chunkData.array());
+    } else {
+      byte[] b = chunk.chunkData.array();
+      newChunkData.put(b, 0, offset1);
+      PublicBAOS a = new PublicBAOS();
+      chunk.chunkStatistic.serialize(a);
+      newChunkData.put(a.getBuf(), 0, a.size());
+      newChunkData.put(b, offset1, b.length - offset1);
+    }
     chunkData = newChunkData;
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
index 2250307..27a2518 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
@@ -163,8 +163,17 @@ public class ChunkReader implements IChunkReader {
     Decoder valueDecoder = Decoder
         .getDecoderByType(chunkHeader.getEncodingType(), chunkHeader.getDataType());
     byte[] uncompressedPageData = new byte[pageHeader.getUncompressedSize()];
-    unCompressor.uncompress(compressedPageBody, 0, compressedPageBodyLength,
-        uncompressedPageData, 0);
+    try {
+      unCompressor.uncompress(compressedPageBody, 0, compressedPageBodyLength,
+          uncompressedPageData, 0);
+    } catch (Exception e) {
+      System.out.println("error: ");
+      System.out.println("uncompress size: " + pageHeader.getUncompressedSize());
+      System.out.println("compressed size: " + pageHeader.getCompressedSize());
+      System.out.println("page header: " + pageHeader);
+      e.printStackTrace();
+    }
+
     ByteBuffer pageData = ByteBuffer.wrap(uncompressedPageData);
     PageReader reader = new PageReader(pageHeader, pageData, chunkHeader.getDataType(),
         valueDecoder, timeDecoder, filter);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
index 44c2dae..95c4c9d 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/chunk/ChunkWriterImpl.java
@@ -237,6 +237,8 @@ public class ChunkWriterImpl implements IChunkWriter {
 
     // reinit this chunk writer
     pageBuffer.reset();
+    numOfPages = 0;
+    firstPageStatistics = null;
     this.statistics = Statistics.getStatsByType(measurementSchema.getType());
   }
 


[iotdb] 05/11: change encode and decode way

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 5470d7c3c1c8bb9b846fd9d755dd81fb0e1e3766
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Thu Nov 26 11:46:46 2020 +0800

    change encode and decode way
---
 .../apache/iotdb/tsfile/TsFileSequenceRead.java    |    2 +-
 .../iotdb/db/engine/upgrade/UpgradeTask.java       |  308 ++---
 .../iotdb/db/query/control/FileReaderManager.java  |    5 -
 .../java/org/apache/iotdb/db/service/IoTDB.java    |    2 +-
 .../org/apache/iotdb/db/service/UpgradeSevice.java |  244 ++--
 .../apache/iotdb/db/tools/TsFileSketchTool.java    |    2 +-
 .../db/tools/upgrade/TsFileOnlineUpgradeTool.java  | 1233 ++++++++++----------
 .../iotdb/tsfile/encoding/common/EndianType.java   |   26 -
 .../iotdb/tsfile/encoding/decoder/Decoder.java     |    8 +-
 .../tsfile/encoding/decoder/FloatDecoder.java      |   11 +-
 .../tsfile/encoding/decoder/IntRleDecoder.java     |   11 +-
 .../tsfile/encoding/decoder/LongRleDecoder.java    |   11 +-
 .../tsfile/encoding/decoder/PlainDecoder.java      |   21 +-
 .../iotdb/tsfile/encoding/decoder/RleDecoder.java  |   15 +-
 .../tsfile/encoding/encoder/FloatEncoder.java      |    6 +-
 .../tsfile/encoding/encoder/IntRleEncoder.java     |    8 +-
 .../tsfile/encoding/encoder/LongRleEncoder.java    |    6 +-
 .../tsfile/encoding/encoder/PlainEncoder.java      |   52 +-
 .../iotdb/tsfile/encoding/encoder/RleEncoder.java  |   20 +-
 .../tsfile/encoding/encoder/TSEncodingBuilder.java |   13 +-
 .../iotdb/tsfile/read/TsFileSequenceReader.java    |    6 +-
 .../tsfile/read/reader/chunk/ChunkReader.java      |    4 +-
 .../write/writer/RestorableTsFileIOWriter.java     |    1 -
 .../tsfile/encoding/decoder/IntRleDecoderTest.java |   83 +-
 .../encoding/decoder/LongRleDecoderTest.java       |   17 +-
 .../metadata/statistics/DoubleStatisticsTest.java  |   10 +-
 .../metadata/statistics/FloatStatisticsTest.java   |   11 +-
 .../metadata/statistics/IntegerStatisticsTest.java |   10 +-
 .../metadata/statistics/LongStatisticsTest.java    |   15 +-
 .../iotdb/tsfile/file/metadata/utils/Utils.java    |   36 +-
 .../tsfile/read/TsFileSequenceReaderTest.java      |    2 +-
 .../iotdb/tsfile/read/reader/PageReaderTest.java   |   54 +-
 .../iotdb/tsfile/write/TsFileIOWriterTest.java     |   19 +-
 .../iotdb/tsfile/write/writer/PageWriterTest.java  |   50 +-
 34 files changed, 1067 insertions(+), 1255 deletions(-)

diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
index f837b4b..e9314fa 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
@@ -94,7 +94,7 @@ public class TsFileSequenceRead {
             break;
           case MetaMarker.CHUNK_GROUP_HEADER:
             System.out.println("Chunk Group Footer position: " + reader.position());
-            ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupFooter();
+            ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupHeader();
             System.out.println("device: " + chunkGroupHeader.getDeviceID());
             break;
           case MetaMarker.VERSION:
diff --git a/server/src/main/java/org/apache/iotdb/db/engine/upgrade/UpgradeTask.java b/server/src/main/java/org/apache/iotdb/db/engine/upgrade/UpgradeTask.java
index 015cc01..b170797 100644
--- a/server/src/main/java/org/apache/iotdb/db/engine/upgrade/UpgradeTask.java
+++ b/server/src/main/java/org/apache/iotdb/db/engine/upgrade/UpgradeTask.java
@@ -1,154 +1,154 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.db.engine.upgrade;
-
-import static org.apache.iotdb.tsfile.common.constant.TsFileConstant.TSFILE_SUFFIX;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.List;
-import org.apache.iotdb.db.concurrent.WrappedRunnable;
-import org.apache.iotdb.db.conf.IoTDBConstant;
-import org.apache.iotdb.db.conf.IoTDBDescriptor;
-import org.apache.iotdb.db.engine.modification.ModificationFile;
-import org.apache.iotdb.db.engine.storagegroup.TsFileResource;
-import org.apache.iotdb.db.service.UpgradeSevice;
-import org.apache.iotdb.db.tools.upgrade.TsFileOnlineUpgradeTool;
-import org.apache.iotdb.tsfile.exception.write.WriteProcessException;
-import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
-import org.apache.iotdb.tsfile.fileSystem.fsFactory.FSFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class UpgradeTask extends WrappedRunnable {
-
-  private TsFileResource upgradeResource;
-  private static final Logger logger = LoggerFactory.getLogger(UpgradeTask.class);
-  private static final String COMMA_SEPERATOR = ",";
-  private static final int maxLevelNum = IoTDBDescriptor.getInstance().getConfig().getSeqLevelNum();
-
-  private FSFactory fsFactory = FSFactoryProducer.getFSFactory();
-
-  public UpgradeTask(TsFileResource upgradeResource) {
-    this.upgradeResource = upgradeResource;
-  }
-
-  @Override
-  public void runMayThrow() {
-    try {
-      List<TsFileResource> upgradedResources = generateUpgradedFiles();
-      upgradeResource.writeLock();
-      String oldTsfilePath = upgradeResource.getTsFile().getAbsolutePath();
-      String oldModificationFilePath = oldTsfilePath + ModificationFile.FILE_SUFFIX;
-      try {
-        // delete old TsFile and resource
-        upgradeResource.delete();
-        File modificationFile = FSFactoryProducer.getFSFactory().getFile(oldModificationFilePath);
-        // move upgraded TsFiles and modificationFile to their own partition directories
-        for (TsFileResource upgradedResource : upgradedResources) {
-          File upgradedFile = upgradedResource.getTsFile();
-          long partition = upgradedResource.getTimePartition();
-          String storageGroupPath = upgradedFile.getParentFile().getParentFile().getParent();
-          File partitionDir = FSFactoryProducer.getFSFactory()
-              .getFile(storageGroupPath, partition + "");
-          if (!partitionDir.exists()) {
-            partitionDir.mkdir();
-          }
-          FSFactoryProducer.getFSFactory().moveFile(upgradedFile,
-              FSFactoryProducer.getFSFactory().getFile(partitionDir, upgradedFile.getName()));
-          upgradedResource.setFile(
-              FSFactoryProducer.getFSFactory().getFile(partitionDir, upgradedFile.getName()));
-          // copy mods file to partition directories
-          if (modificationFile.exists()) {
-            Files.copy(modificationFile.toPath(),
-                FSFactoryProducer.getFSFactory().getFile(partitionDir, upgradedFile.getName()
-                    + ModificationFile.FILE_SUFFIX).toPath());
-          }
-          upgradedResource.serialize();
-          // delete tmp partition folder when it is empty
-          if (upgradedFile.getParentFile().isDirectory()
-              && upgradedFile.getParentFile().listFiles().length == 0) {
-            Files.delete(upgradedFile.getParentFile().toPath());
-          }
-          // rename all files to 0 level
-          upgradedFile = upgradedResource.getTsFile();
-          File zeroMergeVersionFile = getMaxMergeVersionFile(upgradedFile);
-          fsFactory.moveFile(upgradedFile, zeroMergeVersionFile);
-          fsFactory.moveFile(
-              fsFactory.getFile(upgradedFile.getAbsolutePath() + TsFileResource.RESOURCE_SUFFIX),
-              fsFactory
-                  .getFile(
-                      zeroMergeVersionFile.getAbsolutePath() + TsFileResource.RESOURCE_SUFFIX));
-          upgradedResource.setFile(upgradedFile);
-        }
-        // delete old modificationFile 
-        if (modificationFile.exists()) {
-          Files.delete(modificationFile.toPath());
-        }
-        // delete upgrade folder when it is empty
-        if (upgradeResource.getTsFile().getParentFile().isDirectory()
-            && upgradeResource.getTsFile().getParentFile().listFiles().length == 0) {
-          Files.delete(upgradeResource.getTsFile().getParentFile().toPath());
-        }
-        upgradeResource.setUpgradedResources(upgradedResources);
-        UpgradeLog.writeUpgradeLogFile(
-            oldTsfilePath + COMMA_SEPERATOR + UpgradeCheckStatus.UPGRADE_SUCCESS);
-        upgradeResource.getUpgradeTsFileResourceCallBack().call(upgradeResource);
-      } finally {
-        upgradeResource.writeUnlock();
-      }
-      UpgradeSevice.setCntUpgradeFileNum(UpgradeSevice.getCntUpgradeFileNum() - 1);
-      logger.info("Upgrade completes, file path:{} , the remaining upgraded file num: {}",
-          oldTsfilePath, UpgradeSevice.getCntUpgradeFileNum());
-    } catch (Exception e) {
-      logger.error("meet error when upgrade file:{}", upgradeResource.getTsFile().getAbsolutePath(),
-          e);
-    }
-  }
-
-  private List<TsFileResource> generateUpgradedFiles() throws WriteProcessException {
-    upgradeResource.readLock();
-    String oldTsfilePath = upgradeResource.getTsFile().getAbsolutePath();
-    List<TsFileResource> upgradedResources = new ArrayList<>();
-    UpgradeLog.writeUpgradeLogFile(
-        oldTsfilePath + COMMA_SEPERATOR + UpgradeCheckStatus.BEGIN_UPGRADE_FILE);
-    try {
-      TsFileOnlineUpgradeTool.upgradeOneTsfile(oldTsfilePath, upgradedResources);
-      UpgradeLog.writeUpgradeLogFile(
-          oldTsfilePath + COMMA_SEPERATOR + UpgradeCheckStatus.AFTER_UPGRADE_FILE);
-    } catch (IOException e) {
-      logger
-          .error("generate upgrade file failed, the file to be upgraded:{}", oldTsfilePath, e);
-    } finally {
-      upgradeResource.readUnlock();
-    }
-    return upgradedResources;
-  }
-
-  private File getMaxMergeVersionFile(File seqFile) {
-    String[] splits = seqFile.getName().replace(TSFILE_SUFFIX, "")
-        .split(IoTDBConstant.FILE_NAME_SEPARATOR);
-    return fsFactory.getFile(seqFile.getParentFile(),
-        splits[0] + IoTDBConstant.FILE_NAME_SEPARATOR + splits[1]
-            + IoTDBConstant.FILE_NAME_SEPARATOR + (maxLevelNum - 1) + TSFILE_SUFFIX);
-  }
-
-}
+///*
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *     http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// */
+//package org.apache.iotdb.db.engine.upgrade;
+//
+//import static org.apache.iotdb.tsfile.common.constant.TsFileConstant.TSFILE_SUFFIX;
+//
+//import java.io.File;
+//import java.io.IOException;
+//import java.nio.file.Files;
+//import java.util.ArrayList;
+//import java.util.List;
+//import org.apache.iotdb.db.concurrent.WrappedRunnable;
+//import org.apache.iotdb.db.conf.IoTDBConstant;
+//import org.apache.iotdb.db.conf.IoTDBDescriptor;
+//import org.apache.iotdb.db.engine.modification.ModificationFile;
+//import org.apache.iotdb.db.engine.storagegroup.TsFileResource;
+//import org.apache.iotdb.db.service.UpgradeSevice;
+//import org.apache.iotdb.db.tools.upgrade.TsFileOnlineUpgradeTool;
+//import org.apache.iotdb.tsfile.exception.write.WriteProcessException;
+//import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
+//import org.apache.iotdb.tsfile.fileSystem.fsFactory.FSFactory;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//public class UpgradeTask extends WrappedRunnable {
+//
+//  private TsFileResource upgradeResource;
+//  private static final Logger logger = LoggerFactory.getLogger(UpgradeTask.class);
+//  private static final String COMMA_SEPERATOR = ",";
+//  private static final int maxLevelNum = IoTDBDescriptor.getInstance().getConfig().getSeqLevelNum();
+//
+//  private FSFactory fsFactory = FSFactoryProducer.getFSFactory();
+//
+//  public UpgradeTask(TsFileResource upgradeResource) {
+//    this.upgradeResource = upgradeResource;
+//  }
+//
+//  @Override
+//  public void runMayThrow() {
+//    try {
+//      List<TsFileResource> upgradedResources = generateUpgradedFiles();
+//      upgradeResource.writeLock();
+//      String oldTsfilePath = upgradeResource.getTsFile().getAbsolutePath();
+//      String oldModificationFilePath = oldTsfilePath + ModificationFile.FILE_SUFFIX;
+//      try {
+//        // delete old TsFile and resource
+//        upgradeResource.delete();
+//        File modificationFile = FSFactoryProducer.getFSFactory().getFile(oldModificationFilePath);
+//        // move upgraded TsFiles and modificationFile to their own partition directories
+//        for (TsFileResource upgradedResource : upgradedResources) {
+//          File upgradedFile = upgradedResource.getTsFile();
+//          long partition = upgradedResource.getTimePartition();
+//          String storageGroupPath = upgradedFile.getParentFile().getParentFile().getParent();
+//          File partitionDir = FSFactoryProducer.getFSFactory()
+//              .getFile(storageGroupPath, partition + "");
+//          if (!partitionDir.exists()) {
+//            partitionDir.mkdir();
+//          }
+//          FSFactoryProducer.getFSFactory().moveFile(upgradedFile,
+//              FSFactoryProducer.getFSFactory().getFile(partitionDir, upgradedFile.getName()));
+//          upgradedResource.setFile(
+//              FSFactoryProducer.getFSFactory().getFile(partitionDir, upgradedFile.getName()));
+//          // copy mods file to partition directories
+//          if (modificationFile.exists()) {
+//            Files.copy(modificationFile.toPath(),
+//                FSFactoryProducer.getFSFactory().getFile(partitionDir, upgradedFile.getName()
+//                    + ModificationFile.FILE_SUFFIX).toPath());
+//          }
+//          upgradedResource.serialize();
+//          // delete tmp partition folder when it is empty
+//          if (upgradedFile.getParentFile().isDirectory()
+//              && upgradedFile.getParentFile().listFiles().length == 0) {
+//            Files.delete(upgradedFile.getParentFile().toPath());
+//          }
+//          // rename all files to 0 level
+//          upgradedFile = upgradedResource.getTsFile();
+//          File zeroMergeVersionFile = getMaxMergeVersionFile(upgradedFile);
+//          fsFactory.moveFile(upgradedFile, zeroMergeVersionFile);
+//          fsFactory.moveFile(
+//              fsFactory.getFile(upgradedFile.getAbsolutePath() + TsFileResource.RESOURCE_SUFFIX),
+//              fsFactory
+//                  .getFile(
+//                      zeroMergeVersionFile.getAbsolutePath() + TsFileResource.RESOURCE_SUFFIX));
+//          upgradedResource.setFile(upgradedFile);
+//        }
+//        // delete old modificationFile
+//        if (modificationFile.exists()) {
+//          Files.delete(modificationFile.toPath());
+//        }
+//        // delete upgrade folder when it is empty
+//        if (upgradeResource.getTsFile().getParentFile().isDirectory()
+//            && upgradeResource.getTsFile().getParentFile().listFiles().length == 0) {
+//          Files.delete(upgradeResource.getTsFile().getParentFile().toPath());
+//        }
+//        upgradeResource.setUpgradedResources(upgradedResources);
+//        UpgradeLog.writeUpgradeLogFile(
+//            oldTsfilePath + COMMA_SEPERATOR + UpgradeCheckStatus.UPGRADE_SUCCESS);
+//        upgradeResource.getUpgradeTsFileResourceCallBack().call(upgradeResource);
+//      } finally {
+//        upgradeResource.writeUnlock();
+//      }
+//      UpgradeSevice.setCntUpgradeFileNum(UpgradeSevice.getCntUpgradeFileNum() - 1);
+//      logger.info("Upgrade completes, file path:{} , the remaining upgraded file num: {}",
+//          oldTsfilePath, UpgradeSevice.getCntUpgradeFileNum());
+//    } catch (Exception e) {
+//      logger.error("meet error when upgrade file:{}", upgradeResource.getTsFile().getAbsolutePath(),
+//          e);
+//    }
+//  }
+//
+//  private List<TsFileResource> generateUpgradedFiles() throws WriteProcessException {
+//    upgradeResource.readLock();
+//    String oldTsfilePath = upgradeResource.getTsFile().getAbsolutePath();
+//    List<TsFileResource> upgradedResources = new ArrayList<>();
+//    UpgradeLog.writeUpgradeLogFile(
+//        oldTsfilePath + COMMA_SEPERATOR + UpgradeCheckStatus.BEGIN_UPGRADE_FILE);
+//    try {
+//      TsFileOnlineUpgradeTool.upgradeOneTsfile(oldTsfilePath, upgradedResources);
+//      UpgradeLog.writeUpgradeLogFile(
+//          oldTsfilePath + COMMA_SEPERATOR + UpgradeCheckStatus.AFTER_UPGRADE_FILE);
+//    } catch (IOException e) {
+//      logger
+//          .error("generate upgrade file failed, the file to be upgraded:{}", oldTsfilePath, e);
+//    } finally {
+//      upgradeResource.readUnlock();
+//    }
+//    return upgradedResources;
+//  }
+//
+//  private File getMaxMergeVersionFile(File seqFile) {
+//    String[] splits = seqFile.getName().replace(TSFILE_SUFFIX, "")
+//        .split(IoTDBConstant.FILE_NAME_SEPARATOR);
+//    return fsFactory.getFile(seqFile.getParentFile(),
+//        splits[0] + IoTDBConstant.FILE_NAME_SEPARATOR + splits[1]
+//            + IoTDBConstant.FILE_NAME_SEPARATOR + (maxLevelNum - 1) + TSFILE_SUFFIX);
+//  }
+//
+//}
diff --git a/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java b/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
index 776fcdb..9a9eb18 100644
--- a/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
+++ b/server/src/main/java/org/apache/iotdb/db/query/control/FileReaderManager.java
@@ -33,7 +33,6 @@ import org.apache.iotdb.db.service.ServiceType;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
 import org.apache.iotdb.tsfile.read.UnClosedTsFileReader;
-import org.apache.iotdb.tsfile.v1.read.TsFileSequenceReaderForV1;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -168,10 +167,6 @@ public class FileReaderManager implements IService {
       else {
         tsFileReader = new TsFileSequenceReader(filePath);
         switch (tsFileReader.readVersionNumber()) {
-          case TSFileConfig.VERSION_NUMBER_V1:
-            tsFileReader.close();
-            tsFileReader = new TsFileSequenceReaderForV1(filePath);
-            break;
           case TSFileConfig.VERSION_NUMBER_V2:
             break;
           default:
diff --git a/server/src/main/java/org/apache/iotdb/db/service/IoTDB.java b/server/src/main/java/org/apache/iotdb/db/service/IoTDB.java
index f56e7ca..60d7095 100644
--- a/server/src/main/java/org/apache/iotdb/db/service/IoTDB.java
+++ b/server/src/main/java/org/apache/iotdb/db/service/IoTDB.java
@@ -137,7 +137,7 @@ public class IoTDB implements IoTDBMBean {
     }
 
     registerManager.register(SyncServerManager.getInstance());
-    registerManager.register(UpgradeSevice.getINSTANCE());
+//    registerManager.register(UpgradeSevice.getINSTANCE());
     registerManager.register(MergeManager.getINSTANCE());
     registerManager.register(CompactionMergeTaskPoolManager.getInstance());
 
diff --git a/server/src/main/java/org/apache/iotdb/db/service/UpgradeSevice.java b/server/src/main/java/org/apache/iotdb/db/service/UpgradeSevice.java
index b7d1b89..d3260c7 100644
--- a/server/src/main/java/org/apache/iotdb/db/service/UpgradeSevice.java
+++ b/server/src/main/java/org/apache/iotdb/db/service/UpgradeSevice.java
@@ -1,122 +1,122 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.db.service;
-
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.iotdb.db.conf.IoTDBDescriptor;
-import org.apache.iotdb.db.engine.StorageEngine;
-import org.apache.iotdb.db.engine.upgrade.UpgradeLog;
-import org.apache.iotdb.db.engine.upgrade.UpgradeTask;
-import org.apache.iotdb.db.exception.StartupException;
-import org.apache.iotdb.db.exception.StorageEngineException;
-import org.apache.iotdb.db.utils.UpgradeUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class UpgradeSevice implements IService {
-
-  private static final Logger logger = LoggerFactory.getLogger(UpgradeSevice.class);
-
-  private static final UpgradeSevice INSTANCE = new UpgradeSevice();
-  private ExecutorService upgradeThreadPool;
-  private AtomicInteger threadCnt = new AtomicInteger();
-  private static int cntUpgradeFileNum;
-
-
-  private UpgradeSevice() {
-  }
-
-  public static UpgradeSevice getINSTANCE() {
-    return INSTANCE;
-  }
-
-  @Override
-  public void start() throws StartupException {
-    int updateThreadNum = IoTDBDescriptor.getInstance().getConfig().getUpgradeThreadNum();
-    if (updateThreadNum <= 0) {
-      updateThreadNum = 1;
-    }
-    upgradeThreadPool = Executors.newFixedThreadPool(updateThreadNum,
-        r -> new Thread(r, "UpgradeThread-" + threadCnt.getAndIncrement()));
-    UpgradeLog.createUpgradeLog();
-    countUpgradeFiles();
-    if (cntUpgradeFileNum == 0) {
-      stop();
-      return;
-    }
-    upgradeAll();
-  }
-
-  @Override
-  public void stop() {
-    UpgradeLog.closeLogWriter();
-    if (upgradeThreadPool != null) {
-      upgradeThreadPool.shutdownNow();
-      logger.info("Waiting for upgrade task pool to shut down");
-      while (!upgradeThreadPool.isTerminated()) {
-        // wait
-      }
-      upgradeThreadPool = null;
-      logger.info("Upgrade service stopped");
-    }
-  }
-
-  @Override
-  public ServiceType getID() {
-    return ServiceType.UPGRADE_SERVICE;
-  }
-
-
-  public static void setCntUpgradeFileNum(int cntUpgradeFileNum) {
-    UpgradeUtils.getCntUpgradeFileLock().writeLock().lock();
-    try {
-      UpgradeSevice.cntUpgradeFileNum = cntUpgradeFileNum;
-    } finally {
-      UpgradeUtils.getCntUpgradeFileLock().writeLock().unlock();
-    }
-  }
-
-  public static int getCntUpgradeFileNum() {
-    UpgradeUtils.getCntUpgradeFileLock().readLock().lock();
-    try {
-      return cntUpgradeFileNum;
-    } finally {
-      UpgradeUtils.getCntUpgradeFileLock().readLock().unlock();
-    }
-  }
-
-  public void submitUpgradeTask(UpgradeTask upgradeTask) {
-    upgradeThreadPool.submit(upgradeTask);
-  }
-
-  private static void countUpgradeFiles() {
-    cntUpgradeFileNum = StorageEngine.getInstance().countUpgradeFiles();
-    logger.info("finish counting upgrading files, total num:{}", cntUpgradeFileNum);
-  }
-
-  private static void upgradeAll() {
-    try {
-      StorageEngine.getInstance().upgradeAll();
-    } catch (StorageEngineException e) {
-      logger.error("Cannot perform a global upgrade because", e);
-    }
-  }
-}
+///*
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *     http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// */
+//package org.apache.iotdb.db.service;
+//
+//import java.util.concurrent.ExecutorService;
+//import java.util.concurrent.Executors;
+//import java.util.concurrent.atomic.AtomicInteger;
+//import org.apache.iotdb.db.conf.IoTDBDescriptor;
+//import org.apache.iotdb.db.engine.StorageEngine;
+//import org.apache.iotdb.db.engine.upgrade.UpgradeLog;
+//import org.apache.iotdb.db.engine.upgrade.UpgradeTask;
+//import org.apache.iotdb.db.exception.StartupException;
+//import org.apache.iotdb.db.exception.StorageEngineException;
+//import org.apache.iotdb.db.utils.UpgradeUtils;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//public class UpgradeSevice implements IService {
+//
+//  private static final Logger logger = LoggerFactory.getLogger(UpgradeSevice.class);
+//
+//  private static final UpgradeSevice INSTANCE = new UpgradeSevice();
+//  private ExecutorService upgradeThreadPool;
+//  private AtomicInteger threadCnt = new AtomicInteger();
+//  private static int cntUpgradeFileNum;
+//
+//
+//  private UpgradeSevice() {
+//  }
+//
+//  public static UpgradeSevice getINSTANCE() {
+//    return INSTANCE;
+//  }
+//
+//  @Override
+//  public void start() throws StartupException {
+//    int updateThreadNum = IoTDBDescriptor.getInstance().getConfig().getUpgradeThreadNum();
+//    if (updateThreadNum <= 0) {
+//      updateThreadNum = 1;
+//    }
+//    upgradeThreadPool = Executors.newFixedThreadPool(updateThreadNum,
+//        r -> new Thread(r, "UpgradeThread-" + threadCnt.getAndIncrement()));
+//    UpgradeLog.createUpgradeLog();
+//    countUpgradeFiles();
+//    if (cntUpgradeFileNum == 0) {
+//      stop();
+//      return;
+//    }
+//    upgradeAll();
+//  }
+//
+//  @Override
+//  public void stop() {
+//    UpgradeLog.closeLogWriter();
+//    if (upgradeThreadPool != null) {
+//      upgradeThreadPool.shutdownNow();
+//      logger.info("Waiting for upgrade task pool to shut down");
+//      while (!upgradeThreadPool.isTerminated()) {
+//        // wait
+//      }
+//      upgradeThreadPool = null;
+//      logger.info("Upgrade service stopped");
+//    }
+//  }
+//
+//  @Override
+//  public ServiceType getID() {
+//    return ServiceType.UPGRADE_SERVICE;
+//  }
+//
+//
+//  public static void setCntUpgradeFileNum(int cntUpgradeFileNum) {
+//    UpgradeUtils.getCntUpgradeFileLock().writeLock().lock();
+//    try {
+//      UpgradeSevice.cntUpgradeFileNum = cntUpgradeFileNum;
+//    } finally {
+//      UpgradeUtils.getCntUpgradeFileLock().writeLock().unlock();
+//    }
+//  }
+//
+//  public static int getCntUpgradeFileNum() {
+//    UpgradeUtils.getCntUpgradeFileLock().readLock().lock();
+//    try {
+//      return cntUpgradeFileNum;
+//    } finally {
+//      UpgradeUtils.getCntUpgradeFileLock().readLock().unlock();
+//    }
+//  }
+//
+//  public void submitUpgradeTask(UpgradeTask upgradeTask) {
+//    upgradeThreadPool.submit(upgradeTask);
+//  }
+//
+//  private static void countUpgradeFiles() {
+//    cntUpgradeFileNum = StorageEngine.getInstance().countUpgradeFiles();
+//    logger.info("finish counting upgrading files, total num:{}", cntUpgradeFileNum);
+//  }
+//
+//  private static void upgradeAll() {
+//    try {
+//      StorageEngine.getInstance().upgradeAll();
+//    } catch (StorageEngineException e) {
+//      logger.error("Cannot perform a global upgrade because", e);
+//    }
+//  }
+//}
diff --git a/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java b/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
index 779fb56..b72d99a 100644
--- a/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
+++ b/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
@@ -105,7 +105,7 @@ public class TsFileSketchTool {
           }
           // chunkGroupFooter begins
           printlnBoth(pw, String.format("%20s", chunkEndPos) + "|\t[Chunk Group Footer]");
-          ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupFooter(chunkEndPos, false);
+          ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupHeader(chunkEndPos, false);
           printlnBoth(pw, String.format("%20s", "") + "|\t\t[marker] 0");
           printlnBoth(pw,
                   String.format("%20s", "") + "|\t\t[deviceID] " + chunkGroupHeader.getDeviceID());
diff --git a/server/src/main/java/org/apache/iotdb/db/tools/upgrade/TsFileOnlineUpgradeTool.java b/server/src/main/java/org/apache/iotdb/db/tools/upgrade/TsFileOnlineUpgradeTool.java
index 52b7eb3..7f75ce8 100644
--- a/server/src/main/java/org/apache/iotdb/db/tools/upgrade/TsFileOnlineUpgradeTool.java
+++ b/server/src/main/java/org/apache/iotdb/db/tools/upgrade/TsFileOnlineUpgradeTool.java
@@ -1,619 +1,614 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.db.tools.upgrade;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import org.apache.iotdb.db.engine.StorageEngine;
-import org.apache.iotdb.db.engine.storagegroup.TsFileResource;
-import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
-import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
-import org.apache.iotdb.tsfile.compress.IUnCompressor;
-import org.apache.iotdb.tsfile.encoding.decoder.Decoder;
-import org.apache.iotdb.tsfile.exception.write.PageException;
-import org.apache.iotdb.tsfile.exception.write.UnSupportedDataTypeException;
-import org.apache.iotdb.tsfile.exception.write.WriteProcessException;
-import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
-import org.apache.iotdb.tsfile.file.header.ChunkHeader;
-import org.apache.iotdb.tsfile.file.header.PageHeader;
-import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
-import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
-import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
-import org.apache.iotdb.tsfile.read.common.BatchData;
-import org.apache.iotdb.tsfile.read.reader.TsFileInput;
-import org.apache.iotdb.tsfile.read.reader.page.PageReader;
-import org.apache.iotdb.tsfile.utils.Binary;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-import org.apache.iotdb.tsfile.v1.file.metadata.ChunkGroupMetaDataV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.TsDeviceMetadataIndexV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.TsDeviceMetadataV1;
-import org.apache.iotdb.tsfile.v1.file.metadata.TsFileMetadataV1;
-import org.apache.iotdb.tsfile.v1.file.utils.HeaderUtils;
-import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
-import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
-import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
-import org.apache.iotdb.tsfile.write.writer.TsFileIOWriter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TsFileOnlineUpgradeTool implements AutoCloseable {
-
-  private static final Logger logger = LoggerFactory.getLogger(TsFileOnlineUpgradeTool.class);
-
-  private TsFileInput tsFileInput;
-  private long fileMetadataPos;
-  private int fileMetadataSize;
-  private ByteBuffer markerBuffer = ByteBuffer.allocate(Byte.BYTES);
-  private Decoder defaultTimeDecoder = Decoder.getDecoderByType(
-      TSEncoding.valueOf(TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
-      TSDataType.INT64);
-  private Decoder valueDecoder;
-  protected String file;
-
-  // PartitionId -> TsFileIOWriter 
-  private Map<Long, TsFileIOWriter> partitionWriterMap;
-
-  /**
-   * Create a file reader of the given file. The reader will read the tail of the file to get the
-   * file metadata size.Then the reader will skip the first TSFileConfig.OLD_MAGIC_STRING.length()
-   * bytes of the file for preparing reading real data.
-   *
-   * @param file the data file
-   * @throws IOException If some I/O error occurs
-   */
-  public TsFileOnlineUpgradeTool(String file) throws IOException {
-    this(file, true);
-  }
-
-  /**
-   * construct function for TsfileOnlineUpgradeTool.
-   *
-   * @param file -given file name
-   * @param loadMetadataSize -load meta data size
-   */
-  public TsFileOnlineUpgradeTool(String file, boolean loadMetadataSize) throws IOException {
-    this.file = file;
-    tsFileInput = FSFactoryProducer.getFileInputFactory().getTsFileInput(file);
-    partitionWriterMap = new HashMap<>();
-    try {
-      if (loadMetadataSize) {
-        loadMetadataSize();
-      }
-    } catch (Exception e) {
-      tsFileInput.close();
-      throw e;
-    }
-  }
-
-  /**
-   * upgrade a single tsfile
-   *
-   * @param tsFileName old version tsFile's absolute path
-   * @param upgradedResources new version tsFiles' resources
-   */
-  public static void upgradeOneTsfile(String tsFileName, List<TsFileResource> upgradedResources)
-      throws IOException, WriteProcessException {
-    try (TsFileOnlineUpgradeTool updater = new TsFileOnlineUpgradeTool(tsFileName)) {
-      updater.upgradeFile(upgradedResources);
-    }
-  }
-
-  /**
-   *
-   */
-  public void loadMetadataSize() throws IOException {
-    ByteBuffer metadataSize = ByteBuffer.allocate(Integer.BYTES);
-    tsFileInput.read(metadataSize,
-        tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length - Integer.BYTES);
-    metadataSize.flip();
-    // read file metadata size and position
-    fileMetadataSize = ReadWriteIOUtils.readInt(metadataSize);
-    fileMetadataPos =
-        tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length - Integer.BYTES
-            - fileMetadataSize;
-    // skip the magic header
-    position(TSFileConfig.MAGIC_STRING.length());
-  }
-
-  public String readTailMagic() throws IOException {
-    long totalSize = tsFileInput.size();
-
-    ByteBuffer magicStringBytes = ByteBuffer.allocate(TSFileConfig.MAGIC_STRING.length());
-    tsFileInput.read(magicStringBytes, totalSize - TSFileConfig.MAGIC_STRING.length());
-    magicStringBytes.flip();
-    return new String(magicStringBytes.array());
-  }
-
-  /**
-   * whether the file is a complete TsFile: only if the head magic and tail magic string exists.
-   */
-  public boolean isComplete() throws IOException {
-    return tsFileInput.size() >= TSFileConfig.MAGIC_STRING.length() * 2 && readTailMagic()
-        .equals(readHeadMagic());
-  }
-
-  /**
-   * this function does not modify the position of the file reader.
-   */
-  public String readHeadMagic() throws IOException {
-    return readHeadMagic(false);
-  }
-
-  /**
-   * @param movePosition whether move the position of the file reader after reading the magic header
-   * to the end of the magic head string.
-   */
-  public String readHeadMagic(boolean movePosition) throws IOException {
-    ByteBuffer magicStringBytes = ByteBuffer.allocate(TSFileConfig.MAGIC_STRING.length());
-    if (movePosition) {
-      tsFileInput.position(0);
-      tsFileInput.read(magicStringBytes);
-    } else {
-      tsFileInput.read(magicStringBytes, 0);
-    }
-    magicStringBytes.flip();
-    return new String(magicStringBytes.array());
-  }
-
-  /**
-   * this function reads version number and checks compatibility of TsFile.
-   */
-  public String readVersionNumber() throws IOException {
-    ByteBuffer versionNumberBytes = ByteBuffer
-        .allocate(TSFileConfig.VERSION_NUMBER_V2.getBytes().length);
-    tsFileInput.position(TSFileConfig.MAGIC_STRING.getBytes().length);
-    tsFileInput.read(versionNumberBytes);
-    versionNumberBytes.flip();
-    return new String(versionNumberBytes.array());
-  }
-
-  /**
-   * this function does not modify the position of the file reader.
-   */
-  public TsFileMetadataV1 readFileMetadata() throws IOException {
-    return TsFileMetadataV1.deserializeFrom(readData(fileMetadataPos, fileMetadataSize));
-  }
-
-  /**
-   * this function does not modify the position of the file reader.
-   */
-  public TsDeviceMetadataV1 readTsDeviceMetaData(TsDeviceMetadataIndexV1 index) throws IOException {
-    return TsDeviceMetadataV1.deserializeFrom(readData(index.getOffset(), index.getLen()));
-  }
-
-  /**
-   * read data from current position of the input, and deserialize it to a CHUNK_GROUP_FOOTER. <br>
-   * This method is not threadsafe.
-   *
-   * @return a CHUNK_GROUP_FOOTER
-   * @throws IOException io error
-   */
-  public ChunkGroupHeader readChunkGroupFooter() throws IOException {
-    return ChunkGroupHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), true);
-  }
-
-  /**
-   * read data from current position of the input, and deserialize it to a CHUNK_HEADER. <br> This
-   * method is not threadsafe.
-   *
-   * @return a CHUNK_HEADER
-   * @throws IOException io error
-   */
-  public ChunkHeader readChunkHeader() throws IOException {
-    return HeaderUtils.deserializeChunkHeaderV1(tsFileInput.wrapAsInputStream(), true);
-  }
-
-  /**
-   * not thread safe.
-   *
-   * @param type given tsfile data type
-   */
-  public PageHeader readPageHeader(TSDataType type) throws IOException {
-    return HeaderUtils.deserializePageHeaderV1(tsFileInput.wrapAsInputStream(), type);
-  }
-
-  public ByteBuffer readPage(PageHeader header, CompressionType type)
-      throws IOException {
-    ByteBuffer buffer = readData(-1, header.getCompressedSize());
-    IUnCompressor unCompressor = IUnCompressor.getUnCompressor(type);
-    ByteBuffer uncompressedBuffer = ByteBuffer.allocate(header.getUncompressedSize());
-    if (type == CompressionType.UNCOMPRESSED) {
-      return buffer;
-    }
-    unCompressor.uncompress(buffer.array(), buffer.position(), buffer.remaining(),
-        uncompressedBuffer.array(),
-        0);
-    return uncompressedBuffer;
-  }
-
-  public ByteBuffer readCompressedPage(PageHeader header) throws IOException {
-    return readData(-1, header.getCompressedSize());
-  }
-
-  public long position() throws IOException {
-    return tsFileInput.position();
-  }
-
-  public void position(long offset) throws IOException {
-    tsFileInput.position(offset);
-  }
-
-  /**
-   * read one byte from the input. <br> this method is not thread safe
-   */
-  public byte readMarker() throws IOException {
-    markerBuffer.clear();
-    if (ReadWriteIOUtils.readAsPossible(tsFileInput, markerBuffer) == 0) {
-      throw new IOException("reach the end of the file.");
-    }
-    markerBuffer.flip();
-    return markerBuffer.get();
-  }
-
-  public byte readMarker(long position) throws IOException {
-    return readData(position, Byte.BYTES).get();
-  }
-
-  public void close() throws IOException {
-    this.tsFileInput.close();
-  }
-
-  public String getFileName() {
-    return this.file;
-  }
-
-  /**
-   * read data from tsFileInput, from the current position (if position = -1), or the given
-   * position. <br> if position = -1, the tsFileInput's position will be changed to the current
-   * position + real data size that been read. Other wise, the tsFileInput's position is not
-   * changed.
-   *
-   * @param position the start position of data in the tsFileInput, or the current position if
-   * position = -1
-   * @param size the size of data that want to read
-   * @return data that been read.
-   */
-  private ByteBuffer readData(long position, int size) throws IOException {
-    ByteBuffer buffer = ByteBuffer.allocate(size);
-    if (position == -1) {
-      if (ReadWriteIOUtils.readAsPossible(tsFileInput, buffer) != size) {
-        throw new IOException("reach the end of the data");
-      }
-    } else {
-      if (ReadWriteIOUtils.readAsPossible(tsFileInput, buffer, position, size) != size) {
-        throw new IOException("reach the end of the data");
-      }
-    }
-    buffer.flip();
-    return buffer;
-  }
-
-  /**
-   * upgrade file and resource
-   *
-   * @throws IOException, WriteProcessException
-   */
-  @SuppressWarnings("squid:S3776") // Suppress high Cognitive Complexity warning
-  public void upgradeFile(List<TsFileResource> upgradedResources)
-      throws IOException, WriteProcessException {
-    File oldTsFile = FSFactoryProducer.getFSFactory().getFile(this.file);
-
-    // check if the old TsFile has correct header 
-    if (!fileCheck(oldTsFile)) {
-      return;
-    }
-
-    // ChunkGroupOffset -> version
-    Map<Long, Long> oldVersionInfo = getVersionInfo();
-
-    // start to scan chunks and chunkGroups
-    long startOffsetOfChunkGroup = 0;
-    boolean newChunkGroup = true;
-    long versionOfChunkGroup = 0;
-    int chunkGroupCount = 0;
-    List<List<PageHeader>> pageHeadersInChunkGroup = new ArrayList<>();
-    List<List<ByteBuffer>> pageDataInChunkGroup = new ArrayList<>();
-    List<List<Boolean>> pagePartitionInfoInChunkGroup = new ArrayList<>();
-    byte marker;
-    List<MeasurementSchema> measurementSchemaList = new ArrayList<>();
-    try {
-      while ((marker = this.readMarker()) != MetaMarker.SEPARATOR) {
-        switch (marker) {
-          case MetaMarker.CHUNK_HEADER:
-            // this is the first chunk of a new ChunkGroup.
-            if (newChunkGroup) {
-              newChunkGroup = false;
-              startOffsetOfChunkGroup = this.position() - 1;
-              versionOfChunkGroup = oldVersionInfo.get(startOffsetOfChunkGroup);
-            }
-            ChunkHeader header = this.readChunkHeader();
-            MeasurementSchema measurementSchema = new MeasurementSchema(header.getMeasurementID(),
-                header.getDataType(),
-                header.getEncodingType(),
-                header.getCompressionType());
-            measurementSchemaList.add(measurementSchema);
-            List<PageHeader> pageHeadersInChunk = new ArrayList<>();
-            List<ByteBuffer> dataInChunk = new ArrayList<>();
-            List<Boolean> pagePartitionInfo = new ArrayList<>();
-            for (int j = 0; j < header.getNumOfPages(); j++) {
-              PageHeader pageHeader = readPageHeader(header.getDataType());
-              boolean pageInSamePartition = checkIfPageInSameTimePartition(pageHeader);
-              pagePartitionInfo.add(pageInSamePartition);
-              ByteBuffer pageData = pageInSamePartition ?
-                  readCompressedPage(pageHeader)
-                  : readPage(pageHeader, header.getCompressionType());
-              pageHeadersInChunk.add(pageHeader);
-              dataInChunk.add(pageData);
-            }
-            pageHeadersInChunkGroup.add(pageHeadersInChunk);
-            pageDataInChunkGroup.add(dataInChunk);
-            pagePartitionInfoInChunkGroup.add(pagePartitionInfo);
-            break;
-          case MetaMarker.CHUNK_GROUP_HEADER:
-            // this is the footer of a ChunkGroup.
-            ChunkGroupHeader chunkGroupHeader = this.readChunkGroupFooter();
-            String deviceID = chunkGroupHeader.getDeviceID();
-            rewrite(oldTsFile, deviceID, measurementSchemaList, pageHeadersInChunkGroup,
-                pageDataInChunkGroup, versionOfChunkGroup, pagePartitionInfoInChunkGroup);
-
-            pageHeadersInChunkGroup.clear();
-            pageDataInChunkGroup.clear();
-            measurementSchemaList.clear();
-            pagePartitionInfoInChunkGroup.clear();
-            newChunkGroup = true;
-            chunkGroupCount++;
-            break;
-
-          default:
-            // the disk file is corrupted, using this file may be dangerous
-            logger.error("Unrecognized marker detected, this file may be corrupted");
-            return;
-        }
-      }
-      // close upgraded tsFiles and generate resources for them
-      for (TsFileIOWriter tsFileIOWriter : partitionWriterMap.values()) {
-        upgradedResources.add(endFileAndGenerateResource(tsFileIOWriter));
-      }
-    } catch (IOException e2) {
-      logger.info("TsFile upgrade process cannot proceed at position {} after {} chunk groups "
-          + "recovered, because : {}", this.position(), chunkGroupCount, e2.getMessage());
-    } finally {
-      if (tsFileInput != null) {
-        tsFileInput.close();
-      }
-    }
-  }
-
-  private boolean checkIfPageInSameTimePartition(PageHeader pageHeader) {
-    return StorageEngine.getTimePartition(pageHeader.getStartTime())
-        == StorageEngine.getTimePartition(pageHeader.getEndTime());
-  }
-
-  /**
-   * This method is for rewriting the ChunkGroup which data is in the different time partitions. In
-   * this case, we have to decode the data to points, and then rewrite the data points to different
-   * chunkWriters, finally write chunks to their own upgraded TsFiles
-   */
-  private void rewrite(File oldTsFile, String deviceId, List<MeasurementSchema> schemas,
-      List<List<PageHeader>> pageHeadersInChunkGroup, List<List<ByteBuffer>> dataInChunkGroup,
-      long versionOfChunkGroup, List<List<Boolean>> pagePartitionInfoInChunkGroup)
-      throws IOException, PageException {
-    Map<Long, Map<MeasurementSchema, ChunkWriterImpl>> chunkWritersInChunkGroup = new HashMap<>();
-    for (int i = 0; i < schemas.size(); i++) {
-      MeasurementSchema schema = schemas.get(i);
-      List<ByteBuffer> pageDataInChunk = dataInChunkGroup.get(i);
-      List<PageHeader> pageHeadersInChunk = pageHeadersInChunkGroup.get(i);
-      List<Boolean> pagePartitionInfo = pagePartitionInfoInChunkGroup.get(i);
-      valueDecoder = Decoder
-          .getDecoderByType(schema.getEncodingType(), schema.getType());
-      for (int j = 0; j < pageDataInChunk.size(); j++) {
-        if (Boolean.TRUE.equals(pagePartitionInfo.get(j))) {
-          writePageInSamePartitionToFile(oldTsFile, schema, pageHeadersInChunk.get(j),
-              pageDataInChunk.get(j), chunkWritersInChunkGroup);
-        } else {
-          writePageInDifferentPartitionsToFiles(oldTsFile, schema, pageDataInChunk.get(j),
-              chunkWritersInChunkGroup);
-        }
-      }
-    }
-
-    for (Entry<Long, Map<MeasurementSchema, ChunkWriterImpl>> entry : chunkWritersInChunkGroup
-        .entrySet()) {
-      long partitionId = entry.getKey();
-      TsFileIOWriter tsFileIOWriter = partitionWriterMap.get(partitionId);
-      tsFileIOWriter.startChunkGroup(deviceId);
-      // write chunks to their own upgraded tsFiles
-      for (IChunkWriter chunkWriter : entry.getValue().values()) {
-        chunkWriter.writeToFileWriter(tsFileIOWriter);
-      }
-      tsFileIOWriter.endChunkGroup();
-      tsFileIOWriter.writeVersion(versionOfChunkGroup);
-    }
-  }
-
-  private TsFileIOWriter getOrDefaultTsFileIOWriter(File oldTsFile, long partition) {
-    return partitionWriterMap.computeIfAbsent(partition, k ->
-        {
-          File partitionDir = FSFactoryProducer.getFSFactory().getFile(oldTsFile.getParent()
-              + File.separator + partition);
-          if (!partitionDir.exists()) {
-            partitionDir.mkdirs();
-          }
-          File newFile = FSFactoryProducer.getFSFactory().getFile(oldTsFile.getParent()
-              + File.separator + partition + File.separator + oldTsFile.getName());
-          try {
-            if (!newFile.createNewFile()) {
-              logger.error("The TsFile {} has been created ", newFile);
-              return null;
-            }
-            return new TsFileIOWriter(newFile);
-          } catch (IOException e) {
-            logger.error("Create new TsFile {} failed ", newFile);
-            return null;
-          }
-        }
-    );
-  }
-
-  private void writePageInSamePartitionToFile(File oldTsFile, MeasurementSchema schema,
-      PageHeader pageHeader,
-      ByteBuffer pageData,
-      Map<Long, Map<MeasurementSchema, ChunkWriterImpl>> chunkWritersInChunkGroup)
-      throws PageException {
-    long partitionId = StorageEngine.getTimePartition(pageHeader.getStartTime());
-    getOrDefaultTsFileIOWriter(oldTsFile, partitionId);
-    Map<MeasurementSchema, ChunkWriterImpl> chunkWriters = chunkWritersInChunkGroup
-        .getOrDefault(partitionId, new HashMap<>());
-    ChunkWriterImpl chunkWriter = chunkWriters
-        .getOrDefault(schema, new ChunkWriterImpl(schema));
-    chunkWriter.writePageHeaderAndDataIntoBuff(pageData, pageHeader);
-    chunkWriters.put(schema, chunkWriter);
-    chunkWritersInChunkGroup.put(partitionId, chunkWriters);
-  }
-
-  private void writePageInDifferentPartitionsToFiles(File oldTsFile, MeasurementSchema schema,
-      ByteBuffer pageData,
-      Map<Long, Map<MeasurementSchema, ChunkWriterImpl>> chunkWritersInChunkGroup)
-      throws IOException {
-    valueDecoder.reset();
-    PageReader pageReader = new PageReader(pageData, schema.getType(), valueDecoder,
-        defaultTimeDecoder, null);
-    BatchData batchData = pageReader.getAllSatisfiedPageData();
-    while (batchData.hasCurrent()) {
-      long time = batchData.currentTime();
-      Object value = batchData.currentValue();
-      long partitionId = StorageEngine.getTimePartition(time);
-
-      Map<MeasurementSchema, ChunkWriterImpl> chunkWriters = chunkWritersInChunkGroup
-          .getOrDefault(partitionId, new HashMap<>());
-      ChunkWriterImpl chunkWriter = chunkWriters
-          .getOrDefault(schema, new ChunkWriterImpl(schema));
-      getOrDefaultTsFileIOWriter(oldTsFile, partitionId);
-      switch (schema.getType()) {
-        case INT32:
-          chunkWriter.write(time, (int) value);
-          break;
-        case INT64:
-          chunkWriter.write(time, (long) value);
-          break;
-        case FLOAT:
-          chunkWriter.write(time, (float) value);
-          break;
-        case DOUBLE:
-          chunkWriter.write(time, (double) value);
-          break;
-        case BOOLEAN:
-          chunkWriter.write(time, (boolean) value);
-          break;
-        case TEXT:
-          chunkWriter.write(time, (Binary) value);
-          break;
-        default:
-          throw new UnSupportedDataTypeException(
-              String.format("Data type %s is not supported.", schema.getType()));
-      }
-      batchData.next();
-      chunkWriters.put(schema, chunkWriter);
-      chunkWritersInChunkGroup.put(partitionId, chunkWriters);
-    }
-  }
-
-  /**
-   * check if the file to be upgraded has correct magic strings and version number
-   */
-  private boolean fileCheck(File oldTsFile) throws IOException {
-    long fileSize;
-    if (!oldTsFile.exists()) {
-      logger.error("the file to be updated does not exist, file path: {}", oldTsFile.getPath());
-      return false;
-    } else {
-      fileSize = oldTsFile.length();
-    }
-
-    String magic = readHeadMagic(true);
-    if (!magic.equals(TSFileConfig.MAGIC_STRING)) {
-      logger.error("the file's MAGIC STRING is incorrect, file path: {}", oldTsFile.getPath());
-      return false;
-    }
-
-    String versionNumber = readVersionNumber();
-    if (!versionNumber.equals(TSFileConfig.VERSION_NUMBER_V1)) {
-      logger.error("the file's Version Number is incorrect, file path: {}", oldTsFile.getPath());
-      return false;
-    }
-
-    if (fileSize == TSFileConfig.MAGIC_STRING.length()) {
-      logger.error("the file only contains magic string, file path: {}", oldTsFile.getPath());
-      return false;
-    } else if (!readTailMagic().equals(TSFileConfig.MAGIC_STRING)) {
-      logger.error("the file cannot upgrade, file path: {}", oldTsFile.getPath());
-      return false;
-    }
-    return true;
-  }
-
-  private Map<Long, Long> getVersionInfo() throws IOException {
-    Map<Long, Long> versionInfo = new HashMap<>();
-    TsFileMetadataV1 fileMetadata = readFileMetadata();
-    List<TsDeviceMetadataV1> oldDeviceMetadataList = new ArrayList<>();
-    for (TsDeviceMetadataIndexV1 index : fileMetadata.getDeviceMap().values()) {
-      TsDeviceMetadataV1 oldDeviceMetadata = readTsDeviceMetaData(index);
-      oldDeviceMetadataList.add(oldDeviceMetadata);
-    }
-
-    for (TsDeviceMetadataV1 oldTsDeviceMetadata : oldDeviceMetadataList) {
-      for (ChunkGroupMetaDataV1 oldChunkGroupMetadata : oldTsDeviceMetadata
-          .getChunkGroupMetaDataList()) {
-        long version = oldChunkGroupMetadata.getVersion();
-        long offsetOfChunkGroup = oldChunkGroupMetadata.getStartOffsetOfChunkGroup();
-        // get version informations
-        versionInfo.put(offsetOfChunkGroup, version);
-      }
-    }
-    return versionInfo;
-  }
-
-  private TsFileResource endFileAndGenerateResource(TsFileIOWriter tsFileIOWriter)
-      throws IOException {
-    tsFileIOWriter.endFile();
-    TsFileResource tsFileResource = new TsFileResource(tsFileIOWriter.getFile());
-    Map<String, List<TimeseriesMetadata>> deviceTimeseriesMetadataMap = tsFileIOWriter
-        .getDeviceTimeseriesMetadataMap();
-    for (Map.Entry<String, List<TimeseriesMetadata>> entry : deviceTimeseriesMetadataMap
-        .entrySet()) {
-      String device = entry.getKey();
-      for (TimeseriesMetadata timeseriesMetaData : entry.getValue()) {
-        tsFileResource.updateStartTime(device, timeseriesMetaData.getStatistics().getStartTime());
-        tsFileResource.updateEndTime(device, timeseriesMetaData.getStatistics().getEndTime());
-      }
-    }
-    tsFileResource.setClosed(true);
-    return tsFileResource;
-  }
-
-}
\ No newline at end of file
+///*
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// *
+// *     http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing,
+// * software distributed under the License is distributed on an
+// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// * KIND, either express or implied.  See the License for the
+// * specific language governing permissions and limitations
+// * under the License.
+// */
+//package org.apache.iotdb.db.tools.upgrade;
+//
+//import java.io.File;
+//import java.io.IOException;
+//import java.nio.ByteBuffer;
+//import java.util.ArrayList;
+//import java.util.HashMap;
+//import java.util.List;
+//import java.util.Map;
+//import java.util.Map.Entry;
+//import org.apache.iotdb.db.engine.StorageEngine;
+//import org.apache.iotdb.db.engine.storagegroup.TsFileResource;
+//import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
+//import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
+//import org.apache.iotdb.tsfile.compress.IUnCompressor;
+//import org.apache.iotdb.tsfile.encoding.decoder.Decoder;
+//import org.apache.iotdb.tsfile.exception.write.PageException;
+//import org.apache.iotdb.tsfile.exception.write.UnSupportedDataTypeException;
+//import org.apache.iotdb.tsfile.exception.write.WriteProcessException;
+//import org.apache.iotdb.tsfile.file.MetaMarker;
+//import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
+//import org.apache.iotdb.tsfile.file.header.ChunkHeader;
+//import org.apache.iotdb.tsfile.file.header.PageHeader;
+//import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
+//import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
+//import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+//import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
+//import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
+//import org.apache.iotdb.tsfile.read.common.BatchData;
+//import org.apache.iotdb.tsfile.read.reader.TsFileInput;
+//import org.apache.iotdb.tsfile.read.reader.page.PageReader;
+//import org.apache.iotdb.tsfile.utils.Binary;
+//import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
+//import org.apache.iotdb.tsfile.write.chunk.ChunkWriterImpl;
+//import org.apache.iotdb.tsfile.write.chunk.IChunkWriter;
+//import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+//import org.apache.iotdb.tsfile.write.writer.TsFileIOWriter;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//public class TsFileOnlineUpgradeTool implements AutoCloseable {
+//
+//  private static final Logger logger = LoggerFactory.getLogger(TsFileOnlineUpgradeTool.class);
+//
+//  private TsFileInput tsFileInput;
+//  private long fileMetadataPos;
+//  private int fileMetadataSize;
+//  private ByteBuffer markerBuffer = ByteBuffer.allocate(Byte.BYTES);
+//  private Decoder defaultTimeDecoder = Decoder.getDecoderByType(
+//      TSEncoding.valueOf(TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
+//      TSDataType.INT64);
+//  private Decoder valueDecoder;
+//  protected String file;
+//
+//  // PartitionId -> TsFileIOWriter
+//  private Map<Long, TsFileIOWriter> partitionWriterMap;
+//
+//  /**
+//   * Create a file reader of the given file. The reader will read the tail of the file to get the
+//   * file metadata size.Then the reader will skip the first TSFileConfig.OLD_MAGIC_STRING.length()
+//   * bytes of the file for preparing reading real data.
+//   *
+//   * @param file the data file
+//   * @throws IOException If some I/O error occurs
+//   */
+//  public TsFileOnlineUpgradeTool(String file) throws IOException {
+//    this(file, true);
+//  }
+//
+//  /**
+//   * construct function for TsfileOnlineUpgradeTool.
+//   *
+//   * @param file -given file name
+//   * @param loadMetadataSize -load meta data size
+//   */
+//  public TsFileOnlineUpgradeTool(String file, boolean loadMetadataSize) throws IOException {
+//    this.file = file;
+//    tsFileInput = FSFactoryProducer.getFileInputFactory().getTsFileInput(file);
+//    partitionWriterMap = new HashMap<>();
+//    try {
+//      if (loadMetadataSize) {
+//        loadMetadataSize();
+//      }
+//    } catch (Exception e) {
+//      tsFileInput.close();
+//      throw e;
+//    }
+//  }
+//
+//  /**
+//   * upgrade a single tsfile
+//   *
+//   * @param tsFileName old version tsFile's absolute path
+//   * @param upgradedResources new version tsFiles' resources
+//   */
+//  public static void upgradeOneTsfile(String tsFileName, List<TsFileResource> upgradedResources)
+//      throws IOException, WriteProcessException {
+//    try (TsFileOnlineUpgradeTool updater = new TsFileOnlineUpgradeTool(tsFileName)) {
+//      updater.upgradeFile(upgradedResources);
+//    }
+//  }
+//
+//  /**
+//   *
+//   */
+//  public void loadMetadataSize() throws IOException {
+//    ByteBuffer metadataSize = ByteBuffer.allocate(Integer.BYTES);
+//    tsFileInput.read(metadataSize,
+//        tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length - Integer.BYTES);
+//    metadataSize.flip();
+//    // read file metadata size and position
+//    fileMetadataSize = ReadWriteIOUtils.readInt(metadataSize);
+//    fileMetadataPos =
+//        tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length - Integer.BYTES
+//            - fileMetadataSize;
+//    // skip the magic header
+//    position(TSFileConfig.MAGIC_STRING.length());
+//  }
+//
+//  public String readTailMagic() throws IOException {
+//    long totalSize = tsFileInput.size();
+//
+//    ByteBuffer magicStringBytes = ByteBuffer.allocate(TSFileConfig.MAGIC_STRING.length());
+//    tsFileInput.read(magicStringBytes, totalSize - TSFileConfig.MAGIC_STRING.length());
+//    magicStringBytes.flip();
+//    return new String(magicStringBytes.array());
+//  }
+//
+//  /**
+//   * whether the file is a complete TsFile: only if the head magic and tail magic string exists.
+//   */
+//  public boolean isComplete() throws IOException {
+//    return tsFileInput.size() >= TSFileConfig.MAGIC_STRING.length() * 2 && readTailMagic()
+//        .equals(readHeadMagic());
+//  }
+//
+//  /**
+//   * this function does not modify the position of the file reader.
+//   */
+//  public String readHeadMagic() throws IOException {
+//    return readHeadMagic(false);
+//  }
+//
+//  /**
+//   * @param movePosition whether move the position of the file reader after reading the magic header
+//   * to the end of the magic head string.
+//   */
+//  public String readHeadMagic(boolean movePosition) throws IOException {
+//    ByteBuffer magicStringBytes = ByteBuffer.allocate(TSFileConfig.MAGIC_STRING.length());
+//    if (movePosition) {
+//      tsFileInput.position(0);
+//      tsFileInput.read(magicStringBytes);
+//    } else {
+//      tsFileInput.read(magicStringBytes, 0);
+//    }
+//    magicStringBytes.flip();
+//    return new String(magicStringBytes.array());
+//  }
+//
+//  /**
+//   * this function reads version number and checks compatibility of TsFile.
+//   */
+//  public String readVersionNumber() throws IOException {
+//    ByteBuffer versionNumberBytes = ByteBuffer
+//        .allocate(TSFileConfig.VERSION_NUMBER_V2.getBytes().length);
+//    tsFileInput.position(TSFileConfig.MAGIC_STRING.getBytes().length);
+//    tsFileInput.read(versionNumberBytes);
+//    versionNumberBytes.flip();
+//    return new String(versionNumberBytes.array());
+//  }
+//
+//  /**
+//   * this function does not modify the position of the file reader.
+//   */
+//  public TsFileMetadataV1 readFileMetadata() throws IOException {
+//    return TsFileMetadataV1.deserializeFrom(readData(fileMetadataPos, fileMetadataSize));
+//  }
+//
+//  /**
+//   * this function does not modify the position of the file reader.
+//   */
+//  public TsDeviceMetadataV1 readTsDeviceMetaData(TsDeviceMetadataIndexV1 index) throws IOException {
+//    return TsDeviceMetadataV1.deserializeFrom(readData(index.getOffset(), index.getLen()));
+//  }
+//
+//  /**
+//   * read data from current position of the input, and deserialize it to a CHUNK_GROUP_FOOTER. <br>
+//   * This method is not threadsafe.
+//   *
+//   * @return a CHUNK_GROUP_FOOTER
+//   * @throws IOException io error
+//   */
+//  public ChunkGroupHeader readChunkGroupFooter() throws IOException {
+//    return ChunkGroupHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), true);
+//  }
+//
+//  /**
+//   * read data from current position of the input, and deserialize it to a CHUNK_HEADER. <br> This
+//   * method is not threadsafe.
+//   *
+//   * @return a CHUNK_HEADER
+//   * @throws IOException io error
+//   */
+//  public ChunkHeader readChunkHeader() throws IOException {
+//    return HeaderUtils.deserializeChunkHeaderV1(tsFileInput.wrapAsInputStream(), true);
+//  }
+//
+//  /**
+//   * not thread safe.
+//   *
+//   * @param type given tsfile data type
+//   */
+//  public PageHeader readPageHeader(TSDataType type) throws IOException {
+//    return HeaderUtils.deserializePageHeaderV1(tsFileInput.wrapAsInputStream(), type);
+//  }
+//
+//  public ByteBuffer readPage(PageHeader header, CompressionType type)
+//      throws IOException {
+//    ByteBuffer buffer = readData(-1, header.getCompressedSize());
+//    IUnCompressor unCompressor = IUnCompressor.getUnCompressor(type);
+//    ByteBuffer uncompressedBuffer = ByteBuffer.allocate(header.getUncompressedSize());
+//    if (type == CompressionType.UNCOMPRESSED) {
+//      return buffer;
+//    }
+//    unCompressor.uncompress(buffer.array(), buffer.position(), buffer.remaining(),
+//        uncompressedBuffer.array(),
+//        0);
+//    return uncompressedBuffer;
+//  }
+//
+//  public ByteBuffer readCompressedPage(PageHeader header) throws IOException {
+//    return readData(-1, header.getCompressedSize());
+//  }
+//
+//  public long position() throws IOException {
+//    return tsFileInput.position();
+//  }
+//
+//  public void position(long offset) throws IOException {
+//    tsFileInput.position(offset);
+//  }
+//
+//  /**
+//   * read one byte from the input. <br> this method is not thread safe
+//   */
+//  public byte readMarker() throws IOException {
+//    markerBuffer.clear();
+//    if (ReadWriteIOUtils.readAsPossible(tsFileInput, markerBuffer) == 0) {
+//      throw new IOException("reach the end of the file.");
+//    }
+//    markerBuffer.flip();
+//    return markerBuffer.get();
+//  }
+//
+//  public byte readMarker(long position) throws IOException {
+//    return readData(position, Byte.BYTES).get();
+//  }
+//
+//  public void close() throws IOException {
+//    this.tsFileInput.close();
+//  }
+//
+//  public String getFileName() {
+//    return this.file;
+//  }
+//
+//  /**
+//   * read data from tsFileInput, from the current position (if position = -1), or the given
+//   * position. <br> if position = -1, the tsFileInput's position will be changed to the current
+//   * position + real data size that been read. Other wise, the tsFileInput's position is not
+//   * changed.
+//   *
+//   * @param position the start position of data in the tsFileInput, or the current position if
+//   * position = -1
+//   * @param size the size of data that want to read
+//   * @return data that been read.
+//   */
+//  private ByteBuffer readData(long position, int size) throws IOException {
+//    ByteBuffer buffer = ByteBuffer.allocate(size);
+//    if (position == -1) {
+//      if (ReadWriteIOUtils.readAsPossible(tsFileInput, buffer) != size) {
+//        throw new IOException("reach the end of the data");
+//      }
+//    } else {
+//      if (ReadWriteIOUtils.readAsPossible(tsFileInput, buffer, position, size) != size) {
+//        throw new IOException("reach the end of the data");
+//      }
+//    }
+//    buffer.flip();
+//    return buffer;
+//  }
+//
+//  /**
+//   * upgrade file and resource
+//   *
+//   * @throws IOException, WriteProcessException
+//   */
+//  @SuppressWarnings("squid:S3776") // Suppress high Cognitive Complexity warning
+//  public void upgradeFile(List<TsFileResource> upgradedResources)
+//      throws IOException, WriteProcessException {
+//    File oldTsFile = FSFactoryProducer.getFSFactory().getFile(this.file);
+//
+//    // check if the old TsFile has correct header
+//    if (!fileCheck(oldTsFile)) {
+//      return;
+//    }
+//
+//    // ChunkGroupOffset -> version
+//    Map<Long, Long> oldVersionInfo = getVersionInfo();
+//
+//    // start to scan chunks and chunkGroups
+//    long startOffsetOfChunkGroup = 0;
+//    boolean newChunkGroup = true;
+//    long versionOfChunkGroup = 0;
+//    int chunkGroupCount = 0;
+//    List<List<PageHeader>> pageHeadersInChunkGroup = new ArrayList<>();
+//    List<List<ByteBuffer>> pageDataInChunkGroup = new ArrayList<>();
+//    List<List<Boolean>> pagePartitionInfoInChunkGroup = new ArrayList<>();
+//    byte marker;
+//    List<MeasurementSchema> measurementSchemaList = new ArrayList<>();
+//    try {
+//      while ((marker = this.readMarker()) != MetaMarker.SEPARATOR) {
+//        switch (marker) {
+//          case MetaMarker.CHUNK_HEADER:
+//            // this is the first chunk of a new ChunkGroup.
+//            if (newChunkGroup) {
+//              newChunkGroup = false;
+//              startOffsetOfChunkGroup = this.position() - 1;
+//              versionOfChunkGroup = oldVersionInfo.get(startOffsetOfChunkGroup);
+//            }
+//            ChunkHeader header = this.readChunkHeader();
+//            MeasurementSchema measurementSchema = new MeasurementSchema(header.getMeasurementID(),
+//                header.getDataType(),
+//                header.getEncodingType(),
+//                header.getCompressionType());
+//            measurementSchemaList.add(measurementSchema);
+//            List<PageHeader> pageHeadersInChunk = new ArrayList<>();
+//            List<ByteBuffer> dataInChunk = new ArrayList<>();
+//            List<Boolean> pagePartitionInfo = new ArrayList<>();
+//            for (int j = 0; j < header.getNumOfPages(); j++) {
+//              PageHeader pageHeader = readPageHeader(header.getDataType());
+//              boolean pageInSamePartition = checkIfPageInSameTimePartition(pageHeader);
+//              pagePartitionInfo.add(pageInSamePartition);
+//              ByteBuffer pageData = pageInSamePartition ?
+//                  readCompressedPage(pageHeader)
+//                  : readPage(pageHeader, header.getCompressionType());
+//              pageHeadersInChunk.add(pageHeader);
+//              dataInChunk.add(pageData);
+//            }
+//            pageHeadersInChunkGroup.add(pageHeadersInChunk);
+//            pageDataInChunkGroup.add(dataInChunk);
+//            pagePartitionInfoInChunkGroup.add(pagePartitionInfo);
+//            break;
+//          case MetaMarker.CHUNK_GROUP_HEADER:
+//            // this is the footer of a ChunkGroup.
+//            ChunkGroupHeader chunkGroupHeader = this.readChunkGroupFooter();
+//            String deviceID = chunkGroupHeader.getDeviceID();
+//            rewrite(oldTsFile, deviceID, measurementSchemaList, pageHeadersInChunkGroup,
+//                pageDataInChunkGroup, versionOfChunkGroup, pagePartitionInfoInChunkGroup);
+//
+//            pageHeadersInChunkGroup.clear();
+//            pageDataInChunkGroup.clear();
+//            measurementSchemaList.clear();
+//            pagePartitionInfoInChunkGroup.clear();
+//            newChunkGroup = true;
+//            chunkGroupCount++;
+//            break;
+//
+//          default:
+//            // the disk file is corrupted, using this file may be dangerous
+//            logger.error("Unrecognized marker detected, this file may be corrupted");
+//            return;
+//        }
+//      }
+//      // close upgraded tsFiles and generate resources for them
+//      for (TsFileIOWriter tsFileIOWriter : partitionWriterMap.values()) {
+//        upgradedResources.add(endFileAndGenerateResource(tsFileIOWriter));
+//      }
+//    } catch (IOException e2) {
+//      logger.info("TsFile upgrade process cannot proceed at position {} after {} chunk groups "
+//          + "recovered, because : {}", this.position(), chunkGroupCount, e2.getMessage());
+//    } finally {
+//      if (tsFileInput != null) {
+//        tsFileInput.close();
+//      }
+//    }
+//  }
+//
+//  private boolean checkIfPageInSameTimePartition(PageHeader pageHeader) {
+//    return StorageEngine.getTimePartition(pageHeader.getStartTime())
+//        == StorageEngine.getTimePartition(pageHeader.getEndTime());
+//  }
+//
+//  /**
+//   * This method is for rewriting the ChunkGroup which data is in the different time partitions. In
+//   * this case, we have to decode the data to points, and then rewrite the data points to different
+//   * chunkWriters, finally write chunks to their own upgraded TsFiles
+//   */
+//  private void rewrite(File oldTsFile, String deviceId, List<MeasurementSchema> schemas,
+//      List<List<PageHeader>> pageHeadersInChunkGroup, List<List<ByteBuffer>> dataInChunkGroup,
+//      long versionOfChunkGroup, List<List<Boolean>> pagePartitionInfoInChunkGroup)
+//      throws IOException, PageException {
+//    Map<Long, Map<MeasurementSchema, ChunkWriterImpl>> chunkWritersInChunkGroup = new HashMap<>();
+//    for (int i = 0; i < schemas.size(); i++) {
+//      MeasurementSchema schema = schemas.get(i);
+//      List<ByteBuffer> pageDataInChunk = dataInChunkGroup.get(i);
+//      List<PageHeader> pageHeadersInChunk = pageHeadersInChunkGroup.get(i);
+//      List<Boolean> pagePartitionInfo = pagePartitionInfoInChunkGroup.get(i);
+//      valueDecoder = Decoder
+//          .getDecoderByType(schema.getEncodingType(), schema.getType());
+//      for (int j = 0; j < pageDataInChunk.size(); j++) {
+//        if (Boolean.TRUE.equals(pagePartitionInfo.get(j))) {
+//          writePageInSamePartitionToFile(oldTsFile, schema, pageHeadersInChunk.get(j),
+//              pageDataInChunk.get(j), chunkWritersInChunkGroup);
+//        } else {
+//          writePageInDifferentPartitionsToFiles(oldTsFile, schema, pageDataInChunk.get(j),
+//              chunkWritersInChunkGroup);
+//        }
+//      }
+//    }
+//
+//    for (Entry<Long, Map<MeasurementSchema, ChunkWriterImpl>> entry : chunkWritersInChunkGroup
+//        .entrySet()) {
+//      long partitionId = entry.getKey();
+//      TsFileIOWriter tsFileIOWriter = partitionWriterMap.get(partitionId);
+//      tsFileIOWriter.startChunkGroup(deviceId);
+//      // write chunks to their own upgraded tsFiles
+//      for (IChunkWriter chunkWriter : entry.getValue().values()) {
+//        chunkWriter.writeToFileWriter(tsFileIOWriter);
+//      }
+//      tsFileIOWriter.endChunkGroup();
+//      tsFileIOWriter.writeVersion(versionOfChunkGroup);
+//    }
+//  }
+//
+//  private TsFileIOWriter getOrDefaultTsFileIOWriter(File oldTsFile, long partition) {
+//    return partitionWriterMap.computeIfAbsent(partition, k ->
+//        {
+//          File partitionDir = FSFactoryProducer.getFSFactory().getFile(oldTsFile.getParent()
+//              + File.separator + partition);
+//          if (!partitionDir.exists()) {
+//            partitionDir.mkdirs();
+//          }
+//          File newFile = FSFactoryProducer.getFSFactory().getFile(oldTsFile.getParent()
+//              + File.separator + partition + File.separator + oldTsFile.getName());
+//          try {
+//            if (!newFile.createNewFile()) {
+//              logger.error("The TsFile {} has been created ", newFile);
+//              return null;
+//            }
+//            return new TsFileIOWriter(newFile);
+//          } catch (IOException e) {
+//            logger.error("Create new TsFile {} failed ", newFile);
+//            return null;
+//          }
+//        }
+//    );
+//  }
+//
+//  private void writePageInSamePartitionToFile(File oldTsFile, MeasurementSchema schema,
+//      PageHeader pageHeader,
+//      ByteBuffer pageData,
+//      Map<Long, Map<MeasurementSchema, ChunkWriterImpl>> chunkWritersInChunkGroup)
+//      throws PageException {
+//    long partitionId = StorageEngine.getTimePartition(pageHeader.getStartTime());
+//    getOrDefaultTsFileIOWriter(oldTsFile, partitionId);
+//    Map<MeasurementSchema, ChunkWriterImpl> chunkWriters = chunkWritersInChunkGroup
+//        .getOrDefault(partitionId, new HashMap<>());
+//    ChunkWriterImpl chunkWriter = chunkWriters
+//        .getOrDefault(schema, new ChunkWriterImpl(schema));
+//    chunkWriter.writePageHeaderAndDataIntoBuff(pageData, pageHeader);
+//    chunkWriters.put(schema, chunkWriter);
+//    chunkWritersInChunkGroup.put(partitionId, chunkWriters);
+//  }
+//
+//  private void writePageInDifferentPartitionsToFiles(File oldTsFile, MeasurementSchema schema,
+//      ByteBuffer pageData,
+//      Map<Long, Map<MeasurementSchema, ChunkWriterImpl>> chunkWritersInChunkGroup)
+//      throws IOException {
+//    valueDecoder.reset();
+//    PageReader pageReader = new PageReader(pageData, schema.getType(), valueDecoder,
+//        defaultTimeDecoder, null);
+//    BatchData batchData = pageReader.getAllSatisfiedPageData();
+//    while (batchData.hasCurrent()) {
+//      long time = batchData.currentTime();
+//      Object value = batchData.currentValue();
+//      long partitionId = StorageEngine.getTimePartition(time);
+//
+//      Map<MeasurementSchema, ChunkWriterImpl> chunkWriters = chunkWritersInChunkGroup
+//          .getOrDefault(partitionId, new HashMap<>());
+//      ChunkWriterImpl chunkWriter = chunkWriters
+//          .getOrDefault(schema, new ChunkWriterImpl(schema));
+//      getOrDefaultTsFileIOWriter(oldTsFile, partitionId);
+//      switch (schema.getType()) {
+//        case INT32:
+//          chunkWriter.write(time, (int) value);
+//          break;
+//        case INT64:
+//          chunkWriter.write(time, (long) value);
+//          break;
+//        case FLOAT:
+//          chunkWriter.write(time, (float) value);
+//          break;
+//        case DOUBLE:
+//          chunkWriter.write(time, (double) value);
+//          break;
+//        case BOOLEAN:
+//          chunkWriter.write(time, (boolean) value);
+//          break;
+//        case TEXT:
+//          chunkWriter.write(time, (Binary) value);
+//          break;
+//        default:
+//          throw new UnSupportedDataTypeException(
+//              String.format("Data type %s is not supported.", schema.getType()));
+//      }
+//      batchData.next();
+//      chunkWriters.put(schema, chunkWriter);
+//      chunkWritersInChunkGroup.put(partitionId, chunkWriters);
+//    }
+//  }
+//
+//  /**
+//   * check if the file to be upgraded has correct magic strings and version number
+//   */
+//  private boolean fileCheck(File oldTsFile) throws IOException {
+//    long fileSize;
+//    if (!oldTsFile.exists()) {
+//      logger.error("the file to be updated does not exist, file path: {}", oldTsFile.getPath());
+//      return false;
+//    } else {
+//      fileSize = oldTsFile.length();
+//    }
+//
+//    String magic = readHeadMagic(true);
+//    if (!magic.equals(TSFileConfig.MAGIC_STRING)) {
+//      logger.error("the file's MAGIC STRING is incorrect, file path: {}", oldTsFile.getPath());
+//      return false;
+//    }
+//
+//    String versionNumber = readVersionNumber();
+//    if (!versionNumber.equals(TSFileConfig.VERSION_NUMBER_V1)) {
+//      logger.error("the file's Version Number is incorrect, file path: {}", oldTsFile.getPath());
+//      return false;
+//    }
+//
+//    if (fileSize == TSFileConfig.MAGIC_STRING.length()) {
+//      logger.error("the file only contains magic string, file path: {}", oldTsFile.getPath());
+//      return false;
+//    } else if (!readTailMagic().equals(TSFileConfig.MAGIC_STRING)) {
+//      logger.error("the file cannot upgrade, file path: {}", oldTsFile.getPath());
+//      return false;
+//    }
+//    return true;
+//  }
+//
+//  private Map<Long, Long> getVersionInfo() throws IOException {
+//    Map<Long, Long> versionInfo = new HashMap<>();
+//    TsFileMetadataV1 fileMetadata = readFileMetadata();
+//    List<TsDeviceMetadataV1> oldDeviceMetadataList = new ArrayList<>();
+//    for (TsDeviceMetadataIndexV1 index : fileMetadata.getDeviceMap().values()) {
+//      TsDeviceMetadataV1 oldDeviceMetadata = readTsDeviceMetaData(index);
+//      oldDeviceMetadataList.add(oldDeviceMetadata);
+//    }
+//
+//    for (TsDeviceMetadataV1 oldTsDeviceMetadata : oldDeviceMetadataList) {
+//      for (ChunkGroupMetaDataV1 oldChunkGroupMetadata : oldTsDeviceMetadata
+//          .getChunkGroupMetaDataList()) {
+//        long version = oldChunkGroupMetadata.getVersion();
+//        long offsetOfChunkGroup = oldChunkGroupMetadata.getStartOffsetOfChunkGroup();
+//        // get version informations
+//        versionInfo.put(offsetOfChunkGroup, version);
+//      }
+//    }
+//    return versionInfo;
+//  }
+//
+//  private TsFileResource endFileAndGenerateResource(TsFileIOWriter tsFileIOWriter)
+//      throws IOException {
+//    tsFileIOWriter.endFile();
+//    TsFileResource tsFileResource = new TsFileResource(tsFileIOWriter.getFile());
+//    Map<String, List<TimeseriesMetadata>> deviceTimeseriesMetadataMap = tsFileIOWriter
+//        .getDeviceTimeseriesMetadataMap();
+//    for (Map.Entry<String, List<TimeseriesMetadata>> entry : deviceTimeseriesMetadataMap
+//        .entrySet()) {
+//      String device = entry.getKey();
+//      for (TimeseriesMetadata timeseriesMetaData : entry.getValue()) {
+//        tsFileResource.updateStartTime(device, timeseriesMetaData.getStatistics().getStartTime());
+//        tsFileResource.updateEndTime(device, timeseriesMetaData.getStatistics().getEndTime());
+//      }
+//    }
+//    tsFileResource.setClosed(true);
+//    return tsFileResource;
+//  }
+//
+//}
\ No newline at end of file
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/common/EndianType.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/common/EndianType.java
deleted file mode 100644
index 1506d27..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/common/EndianType.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.encoding.common;
-
-/**
- * In current version, we only support BIG_ENDIAN mode.
- */
-public enum EndianType {
-  BIG_ENDIAN, LITTLE_ENDIAN
-}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/Decoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/Decoder.java
index 6bffedd..0e43cf6 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/Decoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/Decoder.java
@@ -22,8 +22,6 @@ package org.apache.iotdb.tsfile.encoding.decoder;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
-
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileDecodingException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
@@ -50,14 +48,14 @@ public abstract class Decoder {
   public static Decoder getDecoderByType(TSEncoding encoding, TSDataType dataType) {
     switch (encoding) {
       case PLAIN:
-        return new PlainDecoder(EndianType.BIG_ENDIAN);
+        return new PlainDecoder();
       case RLE:
         switch (dataType) {
           case BOOLEAN:
           case INT32:
-            return new IntRleDecoder(EndianType.BIG_ENDIAN);
+            return new IntRleDecoder();
           case INT64:
-            return new LongRleDecoder(EndianType.BIG_ENDIAN);
+            return new LongRleDecoder();
           case FLOAT:
           case DOUBLE:
             return new FloatDecoder(TSEncoding.valueOf(encoding.toString()), dataType);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/FloatDecoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/FloatDecoder.java
index eaaf729..ce0bc2a 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/FloatDecoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/FloatDecoder.java
@@ -21,17 +21,14 @@ package org.apache.iotdb.tsfile.encoding.decoder;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.encoding.encoder.FloatEncoder;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileDecodingException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
 import org.apache.iotdb.tsfile.utils.Binary;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Decoder for float or double value using rle or two diff. For more info about
@@ -58,10 +55,10 @@ public class FloatDecoder extends Decoder {
     super(encodingType);
     if (encodingType == TSEncoding.RLE) {
       if (dataType == TSDataType.FLOAT) {
-        decoder = new IntRleDecoder(EndianType.BIG_ENDIAN);
+        decoder = new IntRleDecoder();
         logger.debug("tsfile-encoding FloatDecoder: init decoder using int-rle and float");
       } else if (dataType == TSDataType.DOUBLE) {
-        decoder = new LongRleDecoder(EndianType.BIG_ENDIAN);
+        decoder = new LongRleDecoder();
         logger.debug("tsfile-encoding FloatDecoder: init decoder using long-rle and double");
       } else {
         throw new TsFileDecodingException(String.format("data type %s is not supported by FloatDecoder", dataType));
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/IntRleDecoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/IntRleDecoder.java
index a41512f..0166446 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/IntRleDecoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/IntRleDecoder.java
@@ -21,15 +21,12 @@ package org.apache.iotdb.tsfile.encoding.decoder;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.encoding.bitpacking.IntPacker;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileDecodingException;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Decoder for int value using rle or bit-packing.
@@ -53,8 +50,8 @@ public class IntRleDecoder extends RleDecoder {
    */
   private IntPacker packer;
 
-  public IntRleDecoder(EndianType endianType) {
-    super(endianType);
+  public IntRleDecoder() {
+    super();
     currentValue = 0;
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/LongRleDecoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/LongRleDecoder.java
index 3f8da99..de27c0c 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/LongRleDecoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/LongRleDecoder.java
@@ -21,15 +21,12 @@ package org.apache.iotdb.tsfile.encoding.decoder;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.encoding.bitpacking.LongPacker;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileDecodingException;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Decoder for long value using rle or bit-packing.
@@ -53,8 +50,8 @@ public class LongRleDecoder extends RleDecoder {
    */
   private LongPacker packer;
 
-  public LongRleDecoder(EndianType endianType) {
-    super(endianType);
+  public LongRleDecoder() {
+    super();
     currentValue = 0;
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/PlainDecoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/PlainDecoder.java
index 1fd15c1..8c31cff 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/PlainDecoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/PlainDecoder.java
@@ -22,31 +22,16 @@ package org.apache.iotdb.tsfile.encoding.decoder;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileDecodingException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
 import org.apache.iotdb.tsfile.utils.Binary;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 
 public class PlainDecoder extends Decoder {
 
-  private static final Logger logger = LoggerFactory.getLogger(PlainDecoder.class);
-  private EndianType endianType;
-
-  public EndianType getEndianType() {
-    return endianType;
-  }
-
-  public void setEndianType(EndianType endianType) {
-    this.endianType = endianType;
-  }
 
-  public PlainDecoder(EndianType endianType) {
+  public PlainDecoder() {
     super(TSEncoding.PLAIN);
-    this.endianType = endianType;
   }
 
   @Override
@@ -61,7 +46,7 @@ public class PlainDecoder extends Decoder {
 
   @Override
   public int readInt(ByteBuffer buffer) {
-    return buffer.getInt();
+    return ReadWriteForEncodingUtils.readVarInt(buffer);
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/RleDecoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/RleDecoder.java
index 8606537..bb5e703 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/RleDecoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/decoder/RleDecoder.java
@@ -22,10 +22,8 @@ package org.apache.iotdb.tsfile.encoding.decoder;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileDecodingException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
 import org.apache.iotdb.tsfile.utils.Binary;
@@ -40,16 +38,6 @@ import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
  */
 public abstract class RleDecoder extends Decoder {
 
-  private EndianType endianType;
-
-  public EndianType getEndianType() {
-    return endianType;
-  }
-
-  public void setEndianType(EndianType endianType) {
-    this.endianType = endianType;
-  }
-
   protected TSFileConfig config = TSFileDescriptor.getInstance().getConfig();
   /**
    * mode to indicate current encoding type 0 - RLE 1 - BIT_PACKED.
@@ -87,9 +75,8 @@ public abstract class RleDecoder extends Decoder {
    * a constructor, init with endianType, default encoding is
    * <code>TSEncoding.RLE</code>.
    */
-  public RleDecoder(EndianType endianType) {
+  public RleDecoder() {
     super(TSEncoding.RLE);
-    this.endianType = endianType;
     reset();
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/FloatEncoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/FloatEncoder.java
index 873a4ba..1b4b558 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/FloatEncoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/FloatEncoder.java
@@ -21,8 +21,6 @@ package org.apache.iotdb.tsfile.encoding.encoder;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileEncodingException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
@@ -66,9 +64,9 @@ public class FloatEncoder extends Encoder {
     isMaxPointNumberSaved = false;
     if (encodingType == TSEncoding.RLE) {
       if (dataType == TSDataType.FLOAT) {
-        encoder = new IntRleEncoder(EndianType.BIG_ENDIAN);
+        encoder = new IntRleEncoder();
       } else if (dataType == TSDataType.DOUBLE) {
-        encoder = new LongRleEncoder(EndianType.BIG_ENDIAN);
+        encoder = new LongRleEncoder();
       } else {
         throw new TsFileEncodingException(String.format("data type %s is not supported by FloatEncoder", dataType));
       }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/IntRleEncoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/IntRleEncoder.java
index f3bb89a..9ed819e 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/IntRleEncoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/IntRleEncoder.java
@@ -22,10 +22,8 @@ package org.apache.iotdb.tsfile.encoding.encoder;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.encoding.bitpacking.IntPacker;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 
 /**
@@ -38,11 +36,11 @@ public class IntRleEncoder extends RleEncoder<Integer> {
    */
   private IntPacker packer;
 
-  public IntRleEncoder(EndianType endianType) {
-    super(endianType);
+  public IntRleEncoder() {
+    super();
     bufferedValues = new Integer[TSFileConfig.RLE_MIN_REPEATED_NUM];
     preValue = 0;
-    values = new ArrayList<Integer>();
+    values = new ArrayList<>();
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/LongRleEncoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/LongRleEncoder.java
index dbbbfbb..f8a33f3 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/LongRleEncoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/LongRleEncoder.java
@@ -22,10 +22,8 @@ package org.apache.iotdb.tsfile.encoding.encoder;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.encoding.bitpacking.LongPacker;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 
 /**
@@ -41,8 +39,8 @@ public class LongRleEncoder extends RleEncoder<Long> {
   /**
    * Constructor of LongRleEncoder.
    */
-  public LongRleEncoder(EndianType endianType) {
-    super(endianType);
+  public LongRleEncoder() {
+    super();
     bufferedValues = new Long[TSFileConfig.RLE_MIN_REPEATED_NUM];
     preValue = (long) 0;
     values = new ArrayList<Long>();
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
index d30bcc8..50bff74 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
@@ -22,40 +22,27 @@ package org.apache.iotdb.tsfile.encoding.encoder;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.math.BigDecimal;
-
-import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileEncodingException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
 import org.apache.iotdb.tsfile.utils.Binary;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class PlainEncoder extends Encoder {
 
   private static final Logger logger = LoggerFactory.getLogger(PlainEncoder.class);
-  private EndianType endianType;
   private TSDataType dataType;
   private int maxStringLength;
 
-  public PlainEncoder(EndianType endianType, TSDataType dataType, int maxStringLength) {
+  public PlainEncoder(TSDataType dataType, int maxStringLength) {
     super(TSEncoding.PLAIN);
-    this.endianType = endianType;
     this.dataType = dataType;
     this.maxStringLength = maxStringLength;
   }
 
-  public void setEndianType(EndianType endianType) {
-    this.endianType = endianType;
-  }
-
-  public EndianType getEndianType() {
-    return endianType;
-  }
-
   @Override
   public void encode(boolean value, ByteArrayOutputStream out) {
     if (value) {
@@ -67,40 +54,19 @@ public class PlainEncoder extends Encoder {
 
   @Override
   public void encode(short value, ByteArrayOutputStream out) {
-    if (this.endianType == EndianType.LITTLE_ENDIAN) {
-      out.write(value & 0xFF);
-      out.write((value >> 8) & 0xFF);
-    } else if (this.endianType == EndianType.BIG_ENDIAN) {
-      out.write((value >> 8) & 0xFF);
-      out.write(value & 0xFF);
-    }
+    out.write((value >> 8) & 0xFF);
+    out.write(value & 0xFF);
   }
 
   @Override
   public void encode(int value, ByteArrayOutputStream out) {
-    if (this.endianType == EndianType.LITTLE_ENDIAN) {
-      out.write(value & 0xFF);
-      out.write((value >> 8) & 0xFF);
-      out.write((value >> 16) & 0xFF);
-      out.write((value >> 24) & 0xFF);
-    } else if (this.endianType == EndianType.BIG_ENDIAN) {
-      out.write((value >> 24) & 0xFF);
-      out.write((value >> 16) & 0xFF);
-      out.write((value >> 8) & 0xFF);
-      out.write(value & 0xFF);
-    }
+    ReadWriteForEncodingUtils.writeVarInt(value, out);
   }
 
   @Override
   public void encode(long value, ByteArrayOutputStream out) {
-    if (this.endianType == EndianType.LITTLE_ENDIAN) {
-      for (int i = 0; i < 8; i++) {
-        out.write((byte) (((value) >> (i * 8)) & 0xFF));
-      }
-    } else if (this.endianType == EndianType.BIG_ENDIAN) {
-      for (int i = 7; i >= 0; i--) {
-        out.write((byte) (((value) >> (i * 8)) & 0xFF));
-      }
+    for (int i = 7; i >= 0; i--) {
+      out.write((byte) (((value) >> (i * 8)) & 0xFF));
     }
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/RleEncoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/RleEncoder.java
index 51003ce..51ad831 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/RleEncoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/RleEncoder.java
@@ -24,17 +24,14 @@ import java.io.IOException;
 import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.List;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.encoding.TsFileEncodingException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
 import org.apache.iotdb.tsfile.utils.Binary;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Encodes values using a combination of run length encoding and bit packing,
@@ -66,16 +63,6 @@ public abstract class RleEncoder<T extends Comparable<T>> extends Encoder {
 
   private static final Logger logger = LoggerFactory.getLogger(RleEncoder.class);
 
-  private EndianType endianType;
-
-  public EndianType getEndianType() {
-    return endianType;
-  }
-
-  public void setEndianType(EndianType endianType) {
-    this.endianType = endianType;
-  }
-
   /**
    * we save all value in a list and calculate its bitwidth.
    */
@@ -134,9 +121,8 @@ public abstract class RleEncoder<T extends Comparable<T>> extends Encoder {
   /**
    * constructor.
    */
-  public RleEncoder(EndianType endianType) {
+  public RleEncoder() {
     super(TSEncoding.RLE);
-    this.endianType = endianType;
     bytesBuffer = new ArrayList<>();
     isBitPackRun = false;
     isBitWidthSaved = false;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/TSEncodingBuilder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/TSEncodingBuilder.java
index 0f6cd7c..8fd3cfa 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/TSEncodingBuilder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/TSEncodingBuilder.java
@@ -20,17 +20,14 @@
 package org.apache.iotdb.tsfile.encoding.encoder;
 
 import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.common.constant.JsonFormatConstant;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.exception.write.UnSupportedDataTypeException;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Each subclass of TSEncodingBuilder responds a enumerate value in {@linkplain TSEncoding
@@ -105,7 +102,7 @@ public abstract class TSEncodingBuilder {
 
     @Override
     public Encoder getEncoder(TSDataType type) {
-      return new PlainEncoder(EndianType.BIG_ENDIAN, type, maxStringLength);
+      return new PlainEncoder(type, maxStringLength);
     }
 
     @Override
@@ -137,9 +134,9 @@ public abstract class TSEncodingBuilder {
       switch (type) {
         case INT32:
         case BOOLEAN:
-          return new IntRleEncoder(EndianType.BIG_ENDIAN);
+          return new IntRleEncoder();
         case INT64:
-          return new LongRleEncoder(EndianType.BIG_ENDIAN);
+          return new LongRleEncoder();
         case FLOAT:
         case DOUBLE:
           return new FloatEncoder(TSEncoding.RLE, type, maxPointNumber);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index fd3e50a..a966ba4 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -659,7 +659,7 @@ public class TsFileSequenceReader implements AutoCloseable {
    * @return a CHUNK_GROUP_FOOTER
    * @throws IOException io error
    */
-  public ChunkGroupHeader readChunkGroupFooter() throws IOException {
+  public ChunkGroupHeader readChunkGroupHeader() throws IOException {
     return ChunkGroupHeader.deserializeFrom(tsFileInput.wrapAsInputStream(), true);
   }
 
@@ -671,7 +671,7 @@ public class TsFileSequenceReader implements AutoCloseable {
    * @return a CHUNK_GROUP_FOOTER
    * @throws IOException io error
    */
-  public ChunkGroupHeader readChunkGroupFooter(long position, boolean markerRead)
+  public ChunkGroupHeader readChunkGroupHeader(long position, boolean markerRead)
       throws IOException {
     return ChunkGroupHeader.deserializeFrom(tsFileInput, position, markerRead);
   }
@@ -968,7 +968,7 @@ public class TsFileSequenceReader implements AutoCloseable {
             // this is a chunk group
             // if there is something wrong with the ChunkGroup Footer, we will drop this ChunkGroup
             // because we can not guarantee the correctness of the deviceId.
-            ChunkGroupHeader chunkGroupHeader = this.readChunkGroupFooter();
+            ChunkGroupHeader chunkGroupHeader = this.readChunkGroupHeader();
             deviceID = chunkGroupHeader.getDeviceID();
             if (newSchema != null) {
               for (MeasurementSchema tsSchema : measurementSchemaList) {
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
index 57549a4..f747926 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/reader/chunk/ChunkReader.java
@@ -33,7 +33,6 @@ import org.apache.iotdb.tsfile.read.reader.IPageReader;
 import org.apache.iotdb.tsfile.read.filter.basic.Filter;
 import org.apache.iotdb.tsfile.read.reader.IChunkReader;
 import org.apache.iotdb.tsfile.read.reader.page.PageReader;
-import org.apache.iotdb.tsfile.v1.file.utils.HeaderUtils;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -92,8 +91,7 @@ public class ChunkReader implements IChunkReader {
     // construct next satisfied page header
     while (chunkDataBuffer.remaining() > 0) {
       // deserialize a PageHeader from chunkDataBuffer
-      PageHeader pageHeader = isFromOldTsFile ? HeaderUtils.deserializePageHeaderV1(chunkDataBuffer, chunkHeader.getDataType()) :
-          PageHeader.deserializeFrom(chunkDataBuffer, chunkHeader.getDataType());
+      PageHeader pageHeader = PageHeader.deserializeFrom(chunkDataBuffer, chunkHeader.getDataType());
       // if the current page satisfies
       if (pageSatisfied(pageHeader)) {
         pageReaderList.add(constructPageReaderForNextPage(pageHeader));
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriter.java
index 52052d4..5dbd171 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriter.java
@@ -91,7 +91,6 @@ public class RestorableTsFileIOWriter extends TsFileIOWriter {
       try (TsFileSequenceReader reader = new TsFileSequenceReader(file.getAbsolutePath(), false)) {
 
         truncatedSize = reader.selfCheck(knownSchemas, chunkGroupMetadataList, versionInfo, true);
-        totalChunkNum = reader.getTotalChunkNum();
         if (truncatedSize == TsFileCheckStatus.COMPLETE_FILE) {
           crashed = false;
           canWrite = false;
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/decoder/IntRleDecoderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/decoder/IntRleDecoderTest.java
index bed4ee9..95b413d 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/decoder/IntRleDecoderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/decoder/IntRleDecoderTest.java
@@ -26,30 +26,22 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
-import org.apache.iotdb.tsfile.encoding.decoder.IntRleDecoder;
-import org.apache.iotdb.tsfile.encoding.decoder.RleDecoder;
 import org.apache.iotdb.tsfile.encoding.encoder.IntRleEncoder;
 import org.apache.iotdb.tsfile.encoding.encoder.RleEncoder;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 public class IntRleDecoderTest {
 
   private List<Integer> rleList;
   private List<Integer> bpList;
   private List<Integer> hybridList;
-  private int rleBitWidth;
-  private int bpBitWidth;
-  private int hybridWidth;
 
   @Before
   public void setUp() throws Exception {
-    rleList = new ArrayList<Integer>();
+    rleList = new ArrayList<>();
     int rleCount = 11;
     int rleNum = 18;
     int rleStart = 11;
@@ -63,9 +55,8 @@ public class IntRleDecoderTest {
       rleCount += 2;
       rleStart *= -3;
     }
-    rleBitWidth = ReadWriteForEncodingUtils.getIntMaxBitWidth(rleList);
 
-    bpList = new ArrayList<Integer>();
+    bpList = new ArrayList<>();
     int bpCount = 100000;
     int bpStart = 11;
     for (int i = 0; i < bpCount; i++) {
@@ -76,9 +67,8 @@ public class IntRleDecoderTest {
         bpList.add(bpStart);
       }
     }
-    bpBitWidth = ReadWriteForEncodingUtils.getIntMaxBitWidth(bpList);
 
-    hybridList = new ArrayList<Integer>();
+    hybridList = new ArrayList<>();
     int hybridCount = 11;
     int hybridNum = 1000;
     int hybridStart = 20;
@@ -101,7 +91,6 @@ public class IntRleDecoderTest {
       }
       hybridCount += 2;
     }
-    hybridWidth = ReadWriteForEncodingUtils.getIntMaxBitWidth(hybridList);
   }
 
   @After
@@ -114,17 +103,16 @@ public class IntRleDecoderTest {
     for (int i = 7000000; i < 10000000; i++) {
       list.add(i);
     }
-    int width = ReadWriteForEncodingUtils.getIntMaxBitWidth(list);
-    testLength(list, width, false, 1);
+    testLength(list, false, 1);
     for (int i = 1; i < 10; i++) {
-      testLength(list, width, false, i);
+      testLength(list, false, i);
     }
   }
 
   @Test
   public void testRleReadInt() throws IOException {
     for (int i = 1; i < 10; i++) {
-      testLength(rleList, rleBitWidth, false, i);
+      testLength(rleList, false, i);
     }
   }
 
@@ -144,30 +132,29 @@ public class IntRleDecoderTest {
       rleCount *= 7;
       rleStart *= -3;
     }
-    int bitWidth = ReadWriteForEncodingUtils.getIntMaxBitWidth(repeatList);
     for (int i = 1; i < 10; i++) {
-      testLength(repeatList, bitWidth, false, i);
+      testLength(repeatList, false, i);
     }
   }
 
   @Test
   public void testBitPackingReadInt() throws IOException {
     for (int i = 1; i < 10; i++) {
-      testLength(bpList, bpBitWidth, false, i);
+      testLength(bpList, false, i);
     }
   }
 
   @Test
   public void testHybridReadInt() throws IOException {
     for (int i = 1; i < 3; i++) {
-      testLength(hybridList, hybridWidth, false, i);
+      testLength(hybridList, false, i);
     }
   }
 
   @Test
   public void testHybridReadBoolean() throws IOException {
     for (int i = 1; i < 10; i++) {
-      testLength(hybridList, hybridWidth, false, i);
+      testLength(hybridList, false, i);
     }
   }
 
@@ -178,44 +165,10 @@ public class IntRleDecoderTest {
     }
   }
 
-  public void testBooleanLength(List<Integer> list, int bitWidth, boolean isDebug, int repeatCount)
+  public void testLength(List<Integer> list, boolean isDebug, int repeatCount)
       throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    RleEncoder<Integer> encoder = new IntRleEncoder(EndianType.BIG_ENDIAN);
-    for (int i = 0; i < repeatCount; i++) {
-      for (int value : list) {
-        if (value % 2 == 0) {
-          encoder.encode(false, baos);
-        } else {
-          encoder.encode(true, baos);
-        }
-
-      }
-      encoder.flush(baos);
-    }
-
-    ByteBuffer buffer = ByteBuffer.wrap(baos.toByteArray());
-    RleDecoder decoder = new IntRleDecoder(EndianType.BIG_ENDIAN);
-    for (int i = 0; i < repeatCount; i++) {
-      for (int value : list) {
-        boolean value_ = decoder.readBoolean(buffer);
-        if (isDebug) {
-          System.out.println(value_ + "/" + value);
-        }
-        if (value % 2 == 0) {
-          assertEquals(false, value_);
-        } else {
-          assertEquals(true, value_);
-        }
-
-      }
-    }
-  }
-
-  public void testLength(List<Integer> list, int bitWidth, boolean isDebug, int repeatCount)
-      throws IOException {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    RleEncoder<Integer> encoder = new IntRleEncoder(EndianType.BIG_ENDIAN);
+    RleEncoder<Integer> encoder = new IntRleEncoder();
     for (int i = 0; i < repeatCount; i++) {
       for (int value : list) {
         encoder.encode(value, baos);
@@ -224,7 +177,7 @@ public class IntRleDecoderTest {
     }
 
     ByteBuffer buffer = ByteBuffer.wrap(baos.toByteArray());
-    RleDecoder decoder = new IntRleDecoder(EndianType.BIG_ENDIAN);
+    RleDecoder decoder = new IntRleDecoder();
     for (int i = 0; i < repeatCount; i++) {
       for (int value : list) {
         int value_ = decoder.readInt(buffer);
@@ -237,14 +190,14 @@ public class IntRleDecoderTest {
   }
 
   private void testBitPackedReadHeader(int num) throws IOException {
-    List<Integer> list = new ArrayList<Integer>();
+    List<Integer> list = new ArrayList<>();
 
     for (int i = 0; i < num; i++) {
       list.add(i);
     }
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     int bitWidth = ReadWriteForEncodingUtils.getIntMaxBitWidth(list);
-    RleEncoder<Integer> encoder = new IntRleEncoder(EndianType.BIG_ENDIAN);
+    RleEncoder<Integer> encoder = new IntRleEncoder();
     for (int value : list) {
       encoder.encode(value, baos);
     }
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/decoder/LongRleDecoderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/decoder/LongRleDecoderTest.java
index ad41ac3..153112d 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/decoder/LongRleDecoderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/encoding/decoder/LongRleDecoderTest.java
@@ -26,17 +26,12 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
-import org.apache.iotdb.tsfile.encoding.decoder.LongRleDecoder;
-import org.apache.iotdb.tsfile.encoding.decoder.RleDecoder;
 import org.apache.iotdb.tsfile.encoding.encoder.LongRleEncoder;
 import org.apache.iotdb.tsfile.encoding.encoder.RleEncoder;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 public class LongRleDecoderTest {
 
@@ -184,7 +179,7 @@ public class LongRleDecoderTest {
 
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     int bitWidth = ReadWriteForEncodingUtils.getLongMaxBitWidth(list);
-    RleEncoder<Long> encoder = new LongRleEncoder(EndianType.BIG_ENDIAN);
+    RleEncoder<Long> encoder = new LongRleEncoder();
     for (long value : list) {
       encoder.encode(value, baos);
     }
@@ -206,7 +201,7 @@ public class LongRleDecoderTest {
   public void testLength(List<Long> list, int bitWidth, boolean isDebug, int repeatCount)
       throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    RleEncoder<Long> encoder = new LongRleEncoder(EndianType.BIG_ENDIAN);
+    RleEncoder<Long> encoder = new LongRleEncoder();
     for (int i = 0; i < repeatCount; i++) {
       for (long value : list) {
         encoder.encode(value, baos);
@@ -215,7 +210,7 @@ public class LongRleDecoderTest {
     }
 
     ByteBuffer buffer = ByteBuffer.wrap(baos.toByteArray());
-    RleDecoder decoder = new LongRleDecoder(EndianType.BIG_ENDIAN);
+    RleDecoder decoder = new LongRleDecoder();
     for (int i = 0; i < repeatCount; i++) {
       for (long value : list) {
         long value_ = decoder.readLong(buffer);
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/DoubleStatisticsTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/DoubleStatisticsTest.java
index 0a0131c..4bcbe5b 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/DoubleStatisticsTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/DoubleStatisticsTest.java
@@ -18,11 +18,11 @@
  */
 package org.apache.iotdb.tsfile.file.metadata.statistics;
 
-import org.junit.Test;
-
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 
+import org.junit.Test;
+
 public class DoubleStatisticsTest {
 
   private static final double maxError = 0.0001d;
@@ -36,7 +36,7 @@ public class DoubleStatisticsTest {
     assertFalse(doubleStats.isEmpty());
     assertEquals(2.32d, doubleStats.getMaxValue(), maxError);
     assertEquals(1.34d, doubleStats.getMinValue(), maxError);
-    assertEquals(2.32d + 1.34d, doubleStats.getSumValue(), maxError);
+    assertEquals(2.32d + 1.34d, doubleStats.getSumDoubleValue(), maxError);
     assertEquals(1.34d, doubleStats.getFirstValue(), maxError);
     assertEquals(2.32d, doubleStats.getLastValue(), maxError);
   }
@@ -60,14 +60,14 @@ public class DoubleStatisticsTest {
     assertFalse(doubleStats3.isEmpty());
     assertEquals(100.13453d, doubleStats3.getMaxValue(), maxError);
     assertEquals(1.34d, doubleStats3.getMinValue(), maxError);
-    assertEquals(100.13453d + 1.34d, doubleStats3.getSumValue(), maxError);
+    assertEquals(100.13453d + 1.34d, doubleStats3.getSumDoubleValue(), maxError);
     assertEquals(1.34d, doubleStats3.getFirstValue(), maxError);
     assertEquals(100.13453d, doubleStats3.getLastValue(), maxError);
 
     doubleStats3.mergeStatistics(doubleStats2);
     assertEquals(200.435d, doubleStats3.getMaxValue(), maxError);
     assertEquals(1.34d, doubleStats3.getMinValue(), maxError);
-    assertEquals(100.13453d + 1.34d + 200.435d, doubleStats3.getSumValue(), maxError);
+    assertEquals(100.13453d + 1.34d + 200.435d, doubleStats3.getSumDoubleValue(), maxError);
     assertEquals(1.34d, doubleStats3.getFirstValue(), maxError);
     assertEquals(200.435d, doubleStats3.getLastValue(), maxError);
 
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/FloatStatisticsTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/FloatStatisticsTest.java
index 8bd49c3..2867a23 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/FloatStatisticsTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/FloatStatisticsTest.java
@@ -18,11 +18,11 @@
  */
 package org.apache.iotdb.tsfile.file.metadata.statistics;
 
-import org.junit.Test;
-
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 
+import org.junit.Test;
+
 public class FloatStatisticsTest {
 
   private static final float maxError = 0.0001f;
@@ -36,7 +36,7 @@ public class FloatStatisticsTest {
     assertFalse(floatStats.isEmpty());
     assertEquals(2.32f, (double) floatStats.getMaxValue(), maxError);
     assertEquals(1.34f, (double) floatStats.getMinValue(), maxError);
-    assertEquals(2.32f + 1.34f, (double) floatStats.getSumValue(), maxError);
+    assertEquals(2.32f + 1.34f, (double) floatStats.getSumDoubleValue(), maxError);
     assertEquals(1.34f, (double) floatStats.getFirstValue(), maxError);
     assertEquals(2.32f, (double) floatStats.getLastValue(), maxError);
   }
@@ -60,14 +60,14 @@ public class FloatStatisticsTest {
     assertFalse(floatStats3.isEmpty());
     assertEquals(100.13453f, floatStats3.getMaxValue(), maxError);
     assertEquals(1.34f, floatStats3.getMinValue(), maxError);
-    assertEquals(100.13453f + 1.34f, (float) floatStats3.getSumValue(), maxError);
+    assertEquals(100.13453f + 1.34f, (float) floatStats3.getSumDoubleValue(), maxError);
     assertEquals(1.34f, floatStats3.getFirstValue(), maxError);
     assertEquals(100.13453f, floatStats3.getLastValue(), maxError);
 
     floatStats3.mergeStatistics(floatStats2);
     assertEquals(200.435f, floatStats3.getMaxValue(), maxError);
     assertEquals(1.34f, floatStats3.getMinValue(), maxError);
-    assertEquals(100.13453f + 1.34f + 200.435f, (float) floatStats3.getSumValue(), maxError);
+    assertEquals(100.13453f + 1.34f + 200.435f, (float) floatStats3.getSumDoubleValue(), maxError);
     assertEquals(1.34f, floatStats3.getFirstValue(), maxError);
     assertEquals(200.435f, floatStats3.getLastValue(), maxError);
 
@@ -87,7 +87,6 @@ public class FloatStatisticsTest {
     assertEquals(122.34f, floatStats3.getFirstValue(), maxError);
     assertEquals(125.34f, floatStats3.getLastValue(), maxError);
 
-
     floatStats3.mergeStatistics(floatStats5);
     assertEquals(122.34f, floatStats3.getFirstValue(), maxError);
     assertEquals(125.34f, floatStats3.getLastValue(), maxError);
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/IntegerStatisticsTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/IntegerStatisticsTest.java
index 9de231e..469f7b1 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/IntegerStatisticsTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/IntegerStatisticsTest.java
@@ -18,11 +18,11 @@
  */
 package org.apache.iotdb.tsfile.file.metadata.statistics;
 
-import org.junit.Test;
-
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 
+import org.junit.Test;
+
 public class IntegerStatisticsTest {
 
   @Test
@@ -35,7 +35,7 @@ public class IntegerStatisticsTest {
     assertEquals(2, (int) intStats.getMaxValue());
     assertEquals(1, (int) intStats.getMinValue());
     assertEquals(1, (int) intStats.getFirstValue());
-    assertEquals(3, (int) intStats.getSumValue());
+    assertEquals(3, (int) intStats.getSumLongValue());
     assertEquals(2, (int) intStats.getLastValue());
   }
 
@@ -59,14 +59,14 @@ public class IntegerStatisticsTest {
     assertEquals(100, (int) intStats3.getMaxValue());
     assertEquals(1, (int) intStats3.getMinValue());
     assertEquals(1, (int) intStats3.getFirstValue());
-    assertEquals(1 + 100, (int) intStats3.getSumValue());
+    assertEquals(1 + 100, (int) intStats3.getSumLongValue());
     assertEquals(100, (int) intStats3.getLastValue());
 
     intStats3.mergeStatistics(intStats2);
     assertEquals(200, (int) intStats3.getMaxValue());
     assertEquals(1, (int) intStats3.getMinValue());
     assertEquals(1, (int) intStats3.getFirstValue());
-    assertEquals(101 + 200, (int) intStats3.getSumValue());
+    assertEquals(101 + 200, (int) intStats3.getSumLongValue());
     assertEquals(200, (int) intStats3.getLastValue());
 
     // Unseq merge
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/LongStatisticsTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/LongStatisticsTest.java
index 914d37f..4aa0c6f 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/LongStatisticsTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/statistics/LongStatisticsTest.java
@@ -18,11 +18,14 @@
  */
 package org.apache.iotdb.tsfile.file.metadata.statistics;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import org.apache.iotdb.tsfile.exception.filter.StatisticsClassException;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-
 public class LongStatisticsTest {
 
   @Test
@@ -38,7 +41,7 @@ public class LongStatisticsTest {
     assertEquals(secondValue, (long) longStats.getMaxValue());
     assertEquals(firstValue, (long) longStats.getMinValue());
     assertEquals(firstValue, (long) longStats.getFirstValue());
-    assertEquals(firstValue + secondValue, (long) longStats.getSumValue());
+    assertEquals(firstValue + secondValue, (long) longStats.getSumDoubleValue());
     assertEquals(secondValue, (long) longStats.getLastValue());
   }
 
@@ -63,14 +66,14 @@ public class LongStatisticsTest {
     assertFalse(longStats3.isEmpty());
     assertEquals(max1, (long) longStats3.getMaxValue());
     assertEquals(1, (long) longStats3.getMinValue());
-    assertEquals(max1 + 1, (long) longStats3.getSumValue());
+    assertEquals(max1 + 1, (long) longStats3.getSumDoubleValue());
     assertEquals(1, (long) longStats3.getFirstValue());
     assertEquals(max1, (long) longStats3.getLastValue());
 
     longStats3.mergeStatistics(longStats2);
     assertEquals(max2, (long) longStats3.getMaxValue());
     assertEquals(1, (long) longStats3.getMinValue());
-    assertEquals(max2 + max1 + 1, (long) longStats3.getSumValue());
+    assertEquals(max2 + max1 + 1, (long) longStats3.getSumDoubleValue());
     assertEquals(1, (long) longStats3.getFirstValue());
     assertEquals(max2, (long) longStats3.getLastValue());
 
@@ -89,7 +92,7 @@ public class LongStatisticsTest {
     // if not merge, the min value will not be changed by smaller value in
     // intStats5
     assertEquals(1, (long) longStats3.getMinValue());
-    assertEquals(max2 + max1 + 1, (long) longStats3.getSumValue());
+    assertEquals(max2 + max1 + 1, (long) longStats3.getSumDoubleValue());
     assertEquals(1, (long) longStats3.getFirstValue());
     assertEquals(max2, (long) longStats3.getLastValue());
 
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/utils/Utils.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/utils/Utils.java
index 4dff2b2..cdbd50f 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/utils/Utils.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/file/metadata/utils/Utils.java
@@ -29,6 +29,8 @@ import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.MetadataIndexNode;
 import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
+import org.apache.iotdb.tsfile.file.metadata.statistics.BooleanStatistics;
+import org.apache.iotdb.tsfile.file.metadata.statistics.IntegerStatistics;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
 import org.junit.Assert;
 
@@ -131,11 +133,11 @@ public class Utils {
 
   public static void isPageHeaderEqual(PageHeader header1, PageHeader header2) {
     if (Utils.isTwoObjectsNotNULL(header1, header2, "PageHeader")) {
-      assertTrue(header1.getUncompressedSize() == header2.getUncompressedSize());
-      assertTrue(header1.getCompressedSize() == header2.getCompressedSize());
-      assertTrue(header1.getNumOfValues() == header2.getNumOfValues());
-      assertTrue(header1.getEndTime() == header2.getEndTime());
-      assertTrue(header1.getStartTime() == header2.getStartTime());
+      assertEquals(header1.getUncompressedSize(), header2.getUncompressedSize());
+      assertEquals(header1.getCompressedSize(), header2.getCompressedSize());
+      assertEquals(header1.getNumOfValues(), header2.getNumOfValues());
+      assertEquals(header1.getEndTime(), header2.getEndTime());
+      assertEquals(header1.getStartTime(), header2.getStartTime());
       if (Utils
           .isTwoObjectsNotNULL(header1.getStatistics(), header2.getStatistics(), "statistics")) {
         Utils.isStatisticsEqual(header1.getStatistics(), header2.getStatistics());
@@ -144,21 +146,23 @@ public class Utils {
   }
 
   public static void isStatisticsEqual(Statistics statistics1, Statistics statistics2) {
-    if ((statistics1 == null) ^ (statistics2 == null)) {
+    if ((statistics1 == null) || (statistics2 == null)) {
       System.out.println("error");
       fail("one of statistics is null");
     }
-    if ((statistics1 != null) && (statistics2 != null)) {
-      if (statistics1.isEmpty() ^ statistics2.isEmpty()) {
-        fail("one of statistics is empty while the other one is not");
-      }
-      if (!statistics1.isEmpty() && !statistics2.isEmpty()) {
-        assertEquals(statistics1.getMinValue(), statistics2.getMinValue());
-        assertEquals(statistics1.getMaxValue(), statistics2.getMaxValue());
-        assertEquals(statistics1.getFirstValue(), statistics2.getFirstValue());
-        assertEquals(statistics1.getSumValue(), statistics2.getSumValue(), maxError);
-        assertEquals(statistics1.getLastValue(), statistics2.getLastValue());
+    if (statistics1.isEmpty() || statistics2.isEmpty()) {
+      fail("one of statistics is empty while the other one is not");
+    }
+    if (!statistics1.isEmpty() && !statistics2.isEmpty()) {
+      assertEquals(statistics1.getMinValue(), statistics2.getMinValue());
+      assertEquals(statistics1.getMaxValue(), statistics2.getMaxValue());
+      assertEquals(statistics1.getFirstValue(), statistics2.getFirstValue());
+      if (statistics1 instanceof IntegerStatistics || statistics1 instanceof BooleanStatistics) {
+        assertEquals(statistics1.getSumLongValue(), statistics2.getSumLongValue());
+      } else {
+        assertEquals(statistics1.getSumDoubleValue(), statistics2.getSumDoubleValue(), maxError);
       }
+      assertEquals(statistics1.getLastValue(), statistics2.getLastValue());
     }
   }
 }
\ No newline at end of file
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
index bb86168..c9931fc 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
@@ -80,7 +80,7 @@ public class TsFileSequenceReaderTest {
           }
           break;
         case MetaMarker.CHUNK_GROUP_HEADER:
-          ChunkGroupHeader footer = reader.readChunkGroupFooter();
+          ChunkGroupHeader footer = reader.readChunkGroupHeader();
           long endOffset = reader.position();
           Pair<Long, Long> pair = new Pair<>(startOffset, endOffset);
           deviceChunkGroupMetadataOffsets.putIfAbsent(footer.getDeviceID(), new ArrayList<>());
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/reader/PageReaderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/reader/PageReaderTest.java
index 65d86e6..4f4a681 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/reader/PageReaderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/reader/PageReaderTest.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.encoding.decoder.Decoder;
 import org.apache.iotdb.tsfile.encoding.decoder.DeltaBinaryDecoder;
 import org.apache.iotdb.tsfile.encoding.decoder.DoublePrecisionDecoderV1;
@@ -54,11 +53,11 @@ public class PageReaderTest {
   public void testLong() {
 
     LoopWriteReadTest test = new LoopWriteReadTest("Test INT64",
-        new LongRleEncoder(EndianType.BIG_ENDIAN),
-        new LongRleDecoder(EndianType.BIG_ENDIAN), TSDataType.INT64, POINTS_COUNT_IN_ONE_PAGE) {
+        new LongRleEncoder(),
+        new LongRleDecoder(), TSDataType.INT64, POINTS_COUNT_IN_ONE_PAGE) {
       @Override
       public Object generateValueByIndex(int i) {
-        return Long.valueOf(Long.MAX_VALUE - i);
+        return Long.MAX_VALUE - i;
       }
     };
     test.test(TSDataType.INT64);
@@ -67,11 +66,11 @@ public class PageReaderTest {
   @Test
   public void testBoolean() {
     LoopWriteReadTest test = new LoopWriteReadTest("Test Boolean",
-        new IntRleEncoder(EndianType.BIG_ENDIAN),
-        new IntRleDecoder(EndianType.BIG_ENDIAN), TSDataType.BOOLEAN, POINTS_COUNT_IN_ONE_PAGE) {
+        new IntRleEncoder(),
+        new IntRleDecoder(), TSDataType.BOOLEAN, POINTS_COUNT_IN_ONE_PAGE) {
       @Override
       public Object generateValueByIndex(int i) {
-        return i % 3 == 0 ? true : false;
+        return i % 3 == 0;
       }
     };
     test.test(TSDataType.BOOLEAN);
@@ -79,12 +78,11 @@ public class PageReaderTest {
 
   @Test
   public void testInt() {
-    LoopWriteReadTest test = new LoopWriteReadTest("Test INT32",
-        new IntRleEncoder(EndianType.BIG_ENDIAN),
-        new IntRleDecoder(EndianType.BIG_ENDIAN), TSDataType.INT32, POINTS_COUNT_IN_ONE_PAGE) {
+    LoopWriteReadTest test = new LoopWriteReadTest("Test INT32", new IntRleEncoder(),
+        new IntRleDecoder(), TSDataType.INT32, POINTS_COUNT_IN_ONE_PAGE) {
       @Override
       public Object generateValueByIndex(int i) {
-        return Integer.valueOf(i);
+        return i;
       }
     };
     test.test(TSDataType.INT32);
@@ -96,7 +94,7 @@ public class PageReaderTest {
         new SinglePrecisionDecoderV1(), TSDataType.FLOAT, POINTS_COUNT_IN_ONE_PAGE) {
       @Override
       public Object generateValueByIndex(int i) {
-        return Float.valueOf(i) / 10 - Float.valueOf(i) / 100;
+        return (float) i / 10 - (float) i / 100;
       }
     };
     test.test(TSDataType.FLOAT);
@@ -105,7 +103,7 @@ public class PageReaderTest {
         new SinglePrecisionDecoderV1(), TSDataType.FLOAT, POINTS_COUNT_IN_ONE_PAGE) {
       @Override
       public Object generateValueByIndex(int i) {
-        return Float.valueOf(i) / 100 - Float.valueOf(i) / 10;
+        return (float) i / 100 - (float) i / 10;
       }
     };
     test2.test(TSDataType.FLOAT);
@@ -117,7 +115,7 @@ public class PageReaderTest {
         new DoublePrecisionDecoderV1(), TSDataType.DOUBLE, POINTS_COUNT_IN_ONE_PAGE) {
       @Override
       public Object generateValueByIndex(int i) {
-        return Double.valueOf(i) / 10 - Double.valueOf(i) / 100;
+        return (double) i / 10 - (double) i / 100;
       }
     };
     test.test(TSDataType.DOUBLE);
@@ -126,7 +124,7 @@ public class PageReaderTest {
         new DoublePrecisionDecoderV1(), TSDataType.DOUBLE, POINTS_COUNT_IN_ONE_PAGE) {
       @Override
       public Object generateValueByIndex(int i) {
-        return Double.valueOf(i) / 1000 - Double.valueOf(i) / 100;
+        return (double) i / 1000 - (double) i / 100;
       }
     };
     test2.test(TSDataType.DOUBLE);
@@ -135,11 +133,11 @@ public class PageReaderTest {
   @Test
   public void testBinary() {
     LoopWriteReadTest test = new LoopWriteReadTest("Test Double",
-        new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.TEXT, 1000),
-        new PlainDecoder(EndianType.BIG_ENDIAN), TSDataType.TEXT, POINTS_COUNT_IN_ONE_PAGE) {
+        new PlainEncoder(TSDataType.TEXT, 1000),
+        new PlainDecoder(), TSDataType.TEXT, POINTS_COUNT_IN_ONE_PAGE) {
       @Override
       public Object generateValueByIndex(int i) {
-        return new Binary(new StringBuilder("TEST TEXT").append(i).toString());
+        return new Binary("TEST TEXT" + i);
       }
     };
     test.test(TSDataType.TEXT);
@@ -238,26 +236,26 @@ public class PageReaderTest {
       }
     }
 
-    private void writeData() throws IOException {
+    private void writeData() {
       for (int i = 0; i < count; i++) {
         switch (dataType) {
           case BOOLEAN:
-            pageWriter.write(Long.valueOf(i), (Boolean) generateValueByIndex(i));
+            pageWriter.write(i, (Boolean) generateValueByIndex(i));
             break;
           case INT32:
-            pageWriter.write(Long.valueOf(i), (Integer) generateValueByIndex(i));
+            pageWriter.write(i, (Integer) generateValueByIndex(i));
             break;
           case INT64:
-            pageWriter.write(Long.valueOf(i), (Long) generateValueByIndex(i));
+            pageWriter.write(i, (Long) generateValueByIndex(i));
             break;
           case FLOAT:
-            pageWriter.write(Long.valueOf(i), (Float) generateValueByIndex(i));
+            pageWriter.write(i, (Float) generateValueByIndex(i));
             break;
           case DOUBLE:
-            pageWriter.write(Long.valueOf(i), (Double) generateValueByIndex(i));
+            pageWriter.write(i, (Double) generateValueByIndex(i));
             break;
           case TEXT:
-            pageWriter.write(Long.valueOf(i), (Binary) generateValueByIndex(i));
+            pageWriter.write(i, (Binary) generateValueByIndex(i));
             break;
 
         }
@@ -270,11 +268,11 @@ public class PageReaderTest {
   @Test
   public void testPageDelete() {
     LoopWriteReadTest test = new LoopWriteReadTest("Test INT64",
-        new LongRleEncoder(EndianType.BIG_ENDIAN),
-        new LongRleDecoder(EndianType.BIG_ENDIAN), TSDataType.INT64, 100) {
+        new LongRleEncoder(),
+        new LongRleDecoder(), TSDataType.INT64, 100) {
       @Override
       public Object generateValueByIndex(int i) {
-        return Long.valueOf(Long.MAX_VALUE - i);
+        return Long.MAX_VALUE - i;
       }
     };
     test.testDelete(TSDataType.INT64);
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
index 61e4786..675a77c 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.constant.TestConstant;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
+import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.metadata.TimeSeriesMetadataTest;
 import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
@@ -87,18 +87,17 @@ public class TsFileIOWriterTest {
     Assert.assertEquals(TSFileConfig.VERSION_NUMBER, reader.readVersionNumber());
     Assert.assertEquals(TSFileConfig.MAGIC_STRING, reader.readTailMagic());
 
+    // chunk group header
+    Assert.assertEquals(MetaMarker.CHUNK_GROUP_HEADER, reader.readMarker());
+    ChunkGroupHeader footer = reader.readChunkGroupHeader();
+    Assert.assertEquals(deviceId, footer.getDeviceID());
+
     // chunk header
-    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER
-        .getBytes().length);
-    Assert.assertEquals(MetaMarker.CHUNK_HEADER, reader.readMarker());
-    ChunkHeader header = reader.readChunkHeader();
+    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + 1);
+    Assert.assertEquals(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER, reader.readMarker());
+    ChunkHeader header = reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
     Assert.assertEquals(TimeSeriesMetadataTest.measurementUID, header.getMeasurementID());
 
-    // chunk group footer
-    Assert.assertEquals(MetaMarker.CHUNK_GROUP_FOOTER, reader.readMarker());
-    ChunkGroupFooter footer = reader.readChunkGroupFooter();
-    Assert.assertEquals(deviceId, footer.getDeviceID());
-
     // separator
     Assert.assertEquals(MetaMarker.VERSION, reader.readMarker());
 
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/PageWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/PageWriterTest.java
index 05cfc24..3253495 100755
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/PageWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/PageWriterTest.java
@@ -18,8 +18,12 @@
  */
 package org.apache.iotdb.tsfile.write.writer;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
 import org.apache.iotdb.tsfile.constant.TestConstant;
-import org.apache.iotdb.tsfile.encoding.common.EndianType;
 import org.apache.iotdb.tsfile.encoding.decoder.PlainDecoder;
 import org.apache.iotdb.tsfile.encoding.encoder.PlainEncoder;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
@@ -28,13 +32,6 @@ import org.apache.iotdb.tsfile.utils.Binary;
 import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.write.page.PageWriter;
 import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
-import org.apache.iotdb.tsfile.constant.TestConstant;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
 import org.junit.Test;
 
 public class PageWriterTest {
@@ -42,8 +39,8 @@ public class PageWriterTest {
   @Test
   public void testWriteInt() {
     PageWriter writer = new PageWriter();
-    writer.setTimeEncoder(new PlainEncoder(EndianType.LITTLE_ENDIAN, TSDataType.INT64, 0));
-    writer.setValueEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.INT32, 0));
+    writer.setTimeEncoder(new PlainEncoder(TSDataType.INT64, 0));
+    writer.setValueEncoder(new PlainEncoder(TSDataType.INT32, 0));
     writer.initStatistics(TSDataType.INT32);
     int value = 1;
     int timeCount = 0;
@@ -58,12 +55,11 @@ public class PageWriterTest {
       byte[] timeBytes = new byte[timeSize];
       buffer.get(timeBytes);
       ByteBuffer buffer2 = ByteBuffer.wrap(timeBytes);
-      PlainDecoder decoder = new PlainDecoder(EndianType.LITTLE_ENDIAN);
+      PlainDecoder decoder = new PlainDecoder();
       for (int i = 0; i < timeCount; i++) {
         assertEquals(i, decoder.readLong(buffer2));
       }
       decoder.reset();
-      decoder.setEndianType(EndianType.BIG_ENDIAN);
       assertEquals(value, decoder.readInt(buffer));
     } catch (IOException e) {
       fail();
@@ -73,8 +69,8 @@ public class PageWriterTest {
   @Test
   public void testWriteLong() {
     PageWriter writer = new PageWriter();
-    writer.setTimeEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.INT64, 0));
-    writer.setValueEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.INT64, 0));
+    writer.setTimeEncoder(new PlainEncoder(TSDataType.INT64, 0));
+    writer.setValueEncoder(new PlainEncoder(TSDataType.INT64, 0));
     writer.initStatistics(TSDataType.INT64);
     long value = 123142120391L;
     int timeCount = 0;
@@ -89,7 +85,7 @@ public class PageWriterTest {
       byte[] timeBytes = new byte[timeSize];
       buffer.get(timeBytes);
       ByteBuffer buffer2 = ByteBuffer.wrap(timeBytes);
-      PlainDecoder decoder = new PlainDecoder(EndianType.BIG_ENDIAN);
+      PlainDecoder decoder = new PlainDecoder();
       for (int i = 0; i < timeCount; i++) {
         assertEquals(i, decoder.readLong(buffer2));
       }
@@ -103,8 +99,8 @@ public class PageWriterTest {
   @Test
   public void testWriteFloat() {
     PageWriter writer = new PageWriter();
-    writer.setTimeEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.INT64, 0));
-    writer.setValueEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.FLOAT, 0));
+    writer.setTimeEncoder(new PlainEncoder(TSDataType.INT64, 0));
+    writer.setValueEncoder(new PlainEncoder(TSDataType.FLOAT, 0));
     writer.initStatistics(TSDataType.FLOAT);
     float value = 2.2f;
     int timeCount = 0;
@@ -119,7 +115,7 @@ public class PageWriterTest {
       byte[] timeBytes = new byte[timeSize];
       buffer.get(timeBytes);
       ByteBuffer buffer2 = ByteBuffer.wrap(timeBytes);
-      PlainDecoder decoder = new PlainDecoder(EndianType.BIG_ENDIAN);
+      PlainDecoder decoder = new PlainDecoder();
       for (int i = 0; i < timeCount; i++) {
         assertEquals(i, decoder.readLong(buffer2));
       }
@@ -133,8 +129,8 @@ public class PageWriterTest {
   @Test
   public void testWriteBoolean() {
     PageWriter writer = new PageWriter();
-    writer.setTimeEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.INT64, 0));
-    writer.setValueEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.BOOLEAN, 0));
+    writer.setTimeEncoder(new PlainEncoder(TSDataType.INT64, 0));
+    writer.setValueEncoder(new PlainEncoder( TSDataType.BOOLEAN, 0));
     writer.initStatistics(TSDataType.BOOLEAN);
     boolean value = false;
     int timeCount = 0;
@@ -149,7 +145,7 @@ public class PageWriterTest {
       byte[] timeBytes = new byte[timeSize];
       buffer.get(timeBytes);
       ByteBuffer buffer2 = ByteBuffer.wrap(timeBytes);
-      PlainDecoder decoder = new PlainDecoder(EndianType.BIG_ENDIAN);
+      PlainDecoder decoder = new PlainDecoder();
       for (int i = 0; i < timeCount; i++) {
         assertEquals(i, decoder.readLong(buffer2));
       }
@@ -162,8 +158,8 @@ public class PageWriterTest {
   @Test
   public void testWriteBinary() {
     PageWriter writer = new PageWriter();
-    writer.setTimeEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.INT64, 0));
-    writer.setValueEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.TEXT, 0));
+    writer.setTimeEncoder(new PlainEncoder(TSDataType.INT64, 0));
+    writer.setValueEncoder(new PlainEncoder(TSDataType.TEXT, 0));
     writer.initStatistics(TSDataType.TEXT);
     String value = "I have a dream";
     int timeCount = 0;
@@ -178,7 +174,7 @@ public class PageWriterTest {
       byte[] timeBytes = new byte[timeSize];
       buffer.get(timeBytes);
       ByteBuffer buffer2 = ByteBuffer.wrap(timeBytes);
-      PlainDecoder decoder = new PlainDecoder(EndianType.BIG_ENDIAN);
+      PlainDecoder decoder = new PlainDecoder();
       for (int i = 0; i < timeCount; i++) {
         assertEquals(i, decoder.readLong(buffer2));
       }
@@ -192,8 +188,8 @@ public class PageWriterTest {
   @Test
   public void testWriteDouble() {
     PageWriter writer = new PageWriter();
-    writer.setTimeEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.INT64, 0));
-    writer.setValueEncoder(new PlainEncoder(EndianType.BIG_ENDIAN, TSDataType.DOUBLE, 0));
+    writer.setTimeEncoder(new PlainEncoder(TSDataType.INT64, 0));
+    writer.setValueEncoder(new PlainEncoder(TSDataType.DOUBLE, 0));
     writer.initStatistics(TSDataType.DOUBLE);
     double value = 1d;
     int timeCount = 0;
@@ -208,7 +204,7 @@ public class PageWriterTest {
       byte[] timeBytes = new byte[timeSize];
       buffer.get(timeBytes);
       ByteBuffer buffer2 = ByteBuffer.wrap(timeBytes);
-      PlainDecoder decoder = new PlainDecoder(EndianType.BIG_ENDIAN);
+      PlainDecoder decoder = new PlainDecoder();
       for (int i = 0; i < timeCount; i++) {
         assertEquals(i, decoder.readLong(buffer2));
       }


[iotdb] 03/11: some changes

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit f51296c0bacdd711fdd519ae18db74aa26d62736
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Wed Nov 25 15:46:44 2020 +0800

    some changes
---
 .../iotdb/hadoop/tsfile/record/HDFSTSRecord.java   |  4 +-
 .../org/apache/iotdb/db/metadata/MManager.java     |  6 +--
 .../db/qp/physical/crud/InsertTabletPlan.java      |  4 +-
 .../iotdb/tsfile/file/metadata/ChunkMetadata.java  | 21 +++++-----
 .../tsfile/file/metadata/MetadataIndexEntry.java   |  4 +-
 .../tsfile/file/metadata/MetadataIndexNode.java    |  5 ++-
 .../tsfile/file/metadata/TimeseriesMetadata.java   |  9 +++--
 .../iotdb/tsfile/file/metadata/TsFileMetadata.java | 20 +++++-----
 .../file/metadata/enums/CompressionType.java       | 41 +++-----------------
 .../tsfile/file/metadata/enums/TSDataType.java     | 45 +++-------------------
 .../tsfile/file/metadata/enums/TSEncoding.java     | 40 +++++++++++--------
 .../iotdb/tsfile/read/TsFileSequenceReader.java    | 21 +++++++---
 .../iotdb/tsfile/utils/ReadWriteIOUtils.java       | 31 +++++++--------
 .../tsfile/write/schema/MeasurementSchema.java     | 24 ++++++------
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java  | 14 -------
 15 files changed, 112 insertions(+), 177 deletions(-)

diff --git a/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/record/HDFSTSRecord.java b/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/record/HDFSTSRecord.java
index f996094..a5d380e 100644
--- a/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/record/HDFSTSRecord.java
+++ b/hadoop/src/main/java/org/apache/iotdb/hadoop/tsfile/record/HDFSTSRecord.java
@@ -100,7 +100,7 @@ public class HDFSTSRecord implements Writable {
     out.write(deviceId.getBytes(StandardCharsets.UTF_8));
     out.writeInt(dataPointList.size());
     for (DataPoint dataPoint : dataPointList) {
-      out.writeShort(dataPoint.getType().serialize());
+      out.write(dataPoint.getType().serialize());
       out.writeInt(dataPoint.getMeasurementId().getBytes(StandardCharsets.UTF_8).length);
       out.write(dataPoint.getMeasurementId().getBytes(StandardCharsets.UTF_8));
       switch (dataPoint.getType()) {
@@ -139,7 +139,7 @@ public class HDFSTSRecord implements Writable {
     List<DataPoint> dataPoints = new ArrayList<>(len);
 
     for (int i = 0; i < len; i++) {
-      TSDataType dataType = TSDataType.deserialize(in.readShort());
+      TSDataType dataType = TSDataType.deserialize(in.readByte());
       int lenOfMeasurementId = in.readInt();
       byte[] c = new byte[lenOfMeasurementId];
       in.readFully(c);
diff --git a/server/src/main/java/org/apache/iotdb/db/metadata/MManager.java b/server/src/main/java/org/apache/iotdb/db/metadata/MManager.java
index d349f21..488b135 100644
--- a/server/src/main/java/org/apache/iotdb/db/metadata/MManager.java
+++ b/server/src/main/java/org/apache/iotdb/db/metadata/MManager.java
@@ -350,9 +350,9 @@ public class MManager {
         }
 
         CreateTimeSeriesPlan plan = new CreateTimeSeriesPlan(new PartialPath(args[1]),
-            TSDataType.deserialize(Short.parseShort(args[2])),
-            TSEncoding.deserialize(Short.parseShort(args[3])),
-            CompressionType.deserialize(Short.parseShort(args[4])), props, tagMap, null, alias);
+            TSDataType.deserialize(Byte.parseByte(args[2])),
+            TSEncoding.deserialize(Byte.parseByte(args[3])),
+            CompressionType.deserialize(Byte.parseByte(args[4])), props, tagMap, null, alias);
 
         createTimeseries(plan, offset);
         break;
diff --git a/server/src/main/java/org/apache/iotdb/db/qp/physical/crud/InsertTabletPlan.java b/server/src/main/java/org/apache/iotdb/db/qp/physical/crud/InsertTabletPlan.java
index daa9d84..abdbe6f 100644
--- a/server/src/main/java/org/apache/iotdb/db/qp/physical/crud/InsertTabletPlan.java
+++ b/server/src/main/java/org/apache/iotdb/db/qp/physical/crud/InsertTabletPlan.java
@@ -165,7 +165,7 @@ public class InsertTabletPlan extends InsertPlan {
       if (dataType == null) {
         continue;
       }
-      stream.writeShort(dataType.serialize());
+      stream.write(dataType.serialize());
     }
   }
 
@@ -403,7 +403,7 @@ public class InsertTabletPlan extends InsertPlan {
 
     this.dataTypes = new TSDataType[measurementSize];
     for (int i = 0; i < measurementSize; i++) {
-      dataTypes[i] = TSDataType.deserialize(buffer.getShort());
+      dataTypes[i] = TSDataType.deserialize(buffer.get());
     }
 
     int rows = buffer.getInt();
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/ChunkMetadata.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/ChunkMetadata.java
index 71e2596..6f494b3 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/ChunkMetadata.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/ChunkMetadata.java
@@ -83,9 +83,9 @@ public class ChunkMetadata implements Accountable {
    * constructor of ChunkMetaData.
    *
    * @param measurementUid measurement id
-   * @param tsDataType time series data type
-   * @param fileOffset file offset
-   * @param statistics value statistics
+   * @param tsDataType     time series data type
+   * @param fileOffset     file offset
+   * @param statistics     value statistics
    */
   public ChunkMetadata(String measurementUid, TSDataType tsDataType, long fileOffset,
       Statistics statistics) {
@@ -144,10 +144,7 @@ public class ChunkMetadata implements Accountable {
    */
   public int serializeTo(OutputStream outputStream) throws IOException {
     int byteLen = 0;
-
-    byteLen += ReadWriteIOUtils.write(measurementUid, outputStream);
     byteLen += ReadWriteIOUtils.write(offsetOfChunkHeader, outputStream);
-    byteLen += ReadWriteIOUtils.write(tsDataType, outputStream);
     byteLen += statistics.serialize(outputStream);
     return byteLen;
   }
@@ -155,16 +152,18 @@ public class ChunkMetadata implements Accountable {
   /**
    * deserialize from ByteBuffer.
    *
-   * @param buffer ByteBuffer
+   * @param buffer          ByteBuffer
+   * @param measurementUid  measurementUid of this chunk metadata
+   * @param tsDataType      data type of this chunk metadata
    * @return ChunkMetaData object
    */
-  public static ChunkMetadata deserializeFrom(ByteBuffer buffer) {
+  public static ChunkMetadata deserializeFrom(ByteBuffer buffer, String measurementUid,
+      TSDataType tsDataType) {
     ChunkMetadata chunkMetaData = new ChunkMetadata();
 
-    chunkMetaData.measurementUid = ReadWriteIOUtils.readString(buffer);
+    chunkMetaData.measurementUid = measurementUid;
+    chunkMetaData.tsDataType = tsDataType;
     chunkMetaData.offsetOfChunkHeader = ReadWriteIOUtils.readLong(buffer);
-    chunkMetaData.tsDataType = ReadWriteIOUtils.readDataType(buffer);
-
     chunkMetaData.statistics = Statistics.deserialize(buffer, chunkMetaData.tsDataType);
 
     return chunkMetaData;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexEntry.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexEntry.java
index 5325992..73667ba 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexEntry.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexEntry.java
@@ -56,13 +56,13 @@ public class MetadataIndexEntry {
 
   public int serializeTo(OutputStream outputStream) throws IOException {
     int byteLen = 0;
-    byteLen += ReadWriteIOUtils.write(name, outputStream);
+    byteLen += ReadWriteIOUtils.writeVar(name, outputStream);
     byteLen += ReadWriteIOUtils.write(offset, outputStream);
     return byteLen;
   }
 
   public static MetadataIndexEntry deserializeFrom(ByteBuffer buffer) {
-    String name = ReadWriteIOUtils.readString(buffer);
+    String name = ReadWriteIOUtils.readVarIntString(buffer);
     long offset = ReadWriteIOUtils.readLong(buffer);
     return new MetadataIndexEntry(name, offset);
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexNode.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexNode.java
index 95f67f5..0600fd4 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexNode.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexNode.java
@@ -27,6 +27,7 @@ import java.util.List;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.file.metadata.enums.MetadataIndexNodeType;
 import org.apache.iotdb.tsfile.utils.Pair;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 public class MetadataIndexNode {
@@ -87,7 +88,7 @@ public class MetadataIndexNode {
 
   public int serializeTo(OutputStream outputStream) throws IOException {
     int byteLen = 0;
-    byteLen += ReadWriteIOUtils.write(children.size(), outputStream);
+    byteLen += ReadWriteForEncodingUtils.writeUnsignedVarInt(children.size(), outputStream);
     for (MetadataIndexEntry metadataIndexEntry : children) {
       byteLen += metadataIndexEntry.serializeTo(outputStream);
     }
@@ -98,7 +99,7 @@ public class MetadataIndexNode {
 
   public static MetadataIndexNode deserializeFrom(ByteBuffer buffer) {
     List<MetadataIndexEntry> children = new ArrayList<>();
-    int size = ReadWriteIOUtils.readInt(buffer);
+    int size = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
     for (int i = 0; i < size; i++) {
       children.add(MetadataIndexEntry.deserializeFrom(buffer));
     }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
index 0869643..6188e1d 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
@@ -27,6 +27,7 @@ import org.apache.iotdb.tsfile.common.cache.Accountable;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.statistics.Statistics;
 import org.apache.iotdb.tsfile.read.controller.IChunkMetadataLoader;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 public class TimeseriesMetadata implements Accountable {
@@ -75,7 +76,8 @@ public class TimeseriesMetadata implements Accountable {
     timeseriesMetaData.setMeasurementId(ReadWriteIOUtils.readString(buffer));
     timeseriesMetaData.setTSDataType(ReadWriteIOUtils.readDataType(buffer));
     timeseriesMetaData.setOffsetOfChunkMetaDataList(ReadWriteIOUtils.readLong(buffer));
-    timeseriesMetaData.setDataSizeOfChunkMetaDataList(ReadWriteIOUtils.readInt(buffer));
+    timeseriesMetaData
+        .setDataSizeOfChunkMetaDataList(ReadWriteForEncodingUtils.readUnsignedVarInt(buffer));
     timeseriesMetaData.setStatistics(Statistics.deserialize(buffer, timeseriesMetaData.dataType));
     return timeseriesMetaData;
   }
@@ -92,7 +94,8 @@ public class TimeseriesMetadata implements Accountable {
     byteLen += ReadWriteIOUtils.write(measurementId, outputStream);
     byteLen += ReadWriteIOUtils.write(dataType, outputStream);
     byteLen += ReadWriteIOUtils.write(startOffsetOfChunkMetaDataList, outputStream);
-    byteLen += ReadWriteIOUtils.write(chunkMetaDataListDataSize, outputStream);
+    byteLen += ReadWriteForEncodingUtils
+        .writeUnsignedVarInt(chunkMetaDataListDataSize, outputStream);
     byteLen += statistics.serialize(outputStream);
     return byteLen;
   }
@@ -161,7 +164,7 @@ public class TimeseriesMetadata implements Accountable {
   public long getRamSize() {
     return ramSize;
   }
-  
+
   public void setSeq(boolean seq) {
     isSeq = seq;
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadata.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadata.java
index 33c5f8d..2831aa5 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadata.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadata.java
@@ -77,9 +77,9 @@ public class TsFileMetadata {
 
     // read bloom filter
     if (buffer.hasRemaining()) {
-      byte[] bytes = ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(buffer).array();
-      int filterSize = ReadWriteIOUtils.readInt(buffer);
-      int hashFunctionSize = ReadWriteIOUtils.readInt(buffer);
+      byte[] bytes = ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(buffer);
+      int filterSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
+      int hashFunctionSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
       fileMetaData.bloomFilter = BloomFilter.buildBloomFilter(bytes, filterSize, hashFunctionSize);
     }
 
@@ -103,15 +103,12 @@ public class TsFileMetadata {
     if (metadataIndex != null) {
       byteLen += metadataIndex.serializeTo(outputStream);
     } else {
+      // TODO Maybe should throw an exception
       byteLen += ReadWriteIOUtils.write(0, outputStream);
     }
 
-    // totalChunkNum, invalidChunkNum
-    byteLen += ReadWriteIOUtils.write(totalChunkNum, outputStream);
-    byteLen += ReadWriteIOUtils.write(invalidChunkNum, outputStream);
-
     // versionInfo
-    byteLen += ReadWriteIOUtils.write(versionInfo.size(), outputStream);
+    byteLen += ReadWriteForEncodingUtils.writeUnsignedVarInt(versionInfo.size(), outputStream);
     for (Pair<Long, Long> versionPair : versionInfo) {
       byteLen += ReadWriteIOUtils.write(versionPair.left, outputStream);
       byteLen += ReadWriteIOUtils.write(versionPair.right, outputStream);
@@ -135,11 +132,12 @@ public class TsFileMetadata {
     BloomFilter filter = buildBloomFilter(paths);
 
     byte[] bytes = filter.serialize();
-    byteLen += ReadWriteIOUtils.write(bytes.length, outputStream);
+    byteLen += ReadWriteForEncodingUtils.writeUnsignedVarInt(bytes.length, outputStream);
     outputStream.write(bytes);
     byteLen += bytes.length;
-    byteLen += ReadWriteIOUtils.write(filter.getSize(), outputStream);
-    byteLen += ReadWriteIOUtils.write(filter.getHashFunctionSize(), outputStream);
+    byteLen += ReadWriteForEncodingUtils.writeUnsignedVarInt(filter.getSize(), outputStream);
+    byteLen += ReadWriteForEncodingUtils
+        .writeUnsignedVarInt(filter.getHashFunctionSize(), outputStream);
     return byteLen;
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/CompressionType.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/CompressionType.java
index e8380e6..5976e09 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/CompressionType.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/CompressionType.java
@@ -22,24 +22,12 @@ public enum CompressionType {
   UNCOMPRESSED, SNAPPY, GZIP, LZO, SDT, PAA, PLA, LZ4;
 
   /**
-   * deserialize short number.
+   * deserialize byte number.
    *
-   * @param compressor short number
+   * @param compressor byte number
    * @return CompressionType
    */
-  public static CompressionType deserialize(short compressor) {
-    return getCompressionType(compressor);
-  }
-
-  public static byte deserializeToByte(short compressor) {
-    if (compressor >= 8 || compressor < 0) {
-      throw new IllegalArgumentException("Invalid input: " + compressor);
-    }
-    return (byte) compressor;
-  }
-
-
-  private static CompressionType getCompressionType(short compressor) {
+  public static CompressionType deserialize(byte compressor) {
     if (compressor >= 8 || compressor < 0) {
       throw new IllegalArgumentException("Invalid input: " + compressor);
     }
@@ -63,33 +51,14 @@ public enum CompressionType {
     }
   }
 
-  /**
-   * give an byte to return a compression type.
-   *
-   * @param compressor byte number
-   * @return CompressionType
-   */
-  public static CompressionType byteToEnum(byte compressor) {
-    return getCompressionType(compressor);
-  }
-
   public static int getSerializedSize() {
-    return Short.BYTES;
-  }
-
-  /**
-   * serialize.
-   *
-   * @return short number
-   */
-  public short serialize() {
-    return enumToByte();
+    return Byte.BYTES;
   }
 
   /**
    * @return byte number
    */
-  public byte enumToByte() {
+  public byte serialize() {
     switch (this) {
       case SNAPPY:
         return 1;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSDataType.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSDataType.java
index 505be1b..79dd1e0 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSDataType.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSDataType.java
@@ -32,12 +32,7 @@ public enum TSDataType {
    * @param type -param to judge enum type
    * @return -enum type
    */
-  public static TSDataType deserialize(short type) {
-    return getTsDataType(type);
-  }
-
-
-  private static TSDataType getTsDataType(short type) {
+  public static TSDataType deserialize(byte type) {
     if (type >= 6 || type < 0) {
       throw new IllegalArgumentException("Invalid input: " + type);
     }
@@ -57,46 +52,16 @@ public enum TSDataType {
     }
   }
 
-  public static byte deserializeToByte(short type) {
-    if (type >= 6 || type < 0) {
-      throw new IllegalArgumentException("Invalid input: " + type);
-    }
-    return (byte) type;
-  }
-
-  /**
-   * give an byte to return a data type.
-   *
-   * @param type byte number
-   * @return data type
-   */
-  public static TSDataType byteToEnum(byte type) {
-    return getTsDataType(type);
-  }
-
-  public static TSDataType deserializeFrom(ByteBuffer buffer) {
-    return deserialize(buffer.getShort());
-  }
-
   public static int getSerializedSize() {
-    return Short.BYTES;
+    return Byte.BYTES;
   }
 
   public void serializeTo(ByteBuffer byteBuffer) {
-    byteBuffer.putShort(serialize());
+    byteBuffer.put(serialize());
   }
 
   public void serializeTo(DataOutputStream outputStream) throws IOException {
-    outputStream.writeShort(serialize());
-  }
-
-  /**
-   * return a serialize data type.
-   *
-   * @return -enum type
-   */
-  public short serialize() {
-    return enumToByte();
+    outputStream.write(serialize());
   }
 
   public int getDataTypeSize() {
@@ -119,7 +84,7 @@ public enum TSDataType {
   /**
    * @return byte number
    */
-  public byte enumToByte() {
+  public byte serialize() {
     switch (this) {
       case BOOLEAN:
         return 0;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSEncoding.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSEncoding.java
index a909e47..9232fb9 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSEncoding.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSEncoding.java
@@ -28,18 +28,33 @@ public enum TSEncoding {
    * @param encoding -use to determine encoding type
    * @return -encoding type
    */
-  public static TSEncoding deserialize(short encoding) {
-    return getTsEncoding(encoding);
-  }
-
-  public static byte deserializeToByte(short encoding) {
+  public static TSEncoding deserialize(byte encoding) {
     if (encoding < 0 || 8 < encoding) {
       throw new IllegalArgumentException("Invalid input: " + encoding);
     }
-    return (byte) encoding;
+    switch (encoding) {
+      case 1:
+        return PLAIN_DICTIONARY;
+      case 2:
+        return RLE;
+      case 3:
+        return DIFF;
+      case 4:
+        return TS_2DIFF;
+      case 5:
+        return BITMAP;
+      case 6:
+        return GORILLA_V1;
+      case 7:
+        return REGULAR;
+      case 8:
+        return GORILLA;
+      default:
+        return PLAIN;
+    }
   }
 
-  private static TSEncoding getTsEncoding(short encoding) {
+  private static TSEncoding getTsEncoding(byte encoding) {
     if (encoding < 0 || 8 < encoding) {
       throw new IllegalArgumentException("Invalid input: " + encoding);
     }
@@ -76,7 +91,7 @@ public enum TSEncoding {
   }
 
   public static int getSerializedSize() {
-    return Short.BYTES;
+    return Byte.BYTES;
   }
 
   /**
@@ -84,14 +99,7 @@ public enum TSEncoding {
    *
    * @return -encoding type
    */
-  public short serialize() {
-    return enumToByte();
-  }
-
-  /**
-   * @return byte number
-   */
-  public byte enumToByte() {
+  public byte serialize() {
     switch (this) {
       case PLAIN_DICTIONARY:
         return 1;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index 9b9b3f4..16e2b71 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -491,9 +491,7 @@ public class TsFileSequenceReader implements AutoCloseable {
    */
   public Map<String, List<ChunkMetadata>> readChunkMetadataInDevice(String device)
       throws IOException {
-    if (tsFileMetaData == null) {
-      readFileMetadata();
-    }
+    readFileMetadata();
 
     long start = 0;
     int size = 0;
@@ -507,8 +505,16 @@ public class TsFileSequenceReader implements AutoCloseable {
     // read buffer of all ChunkMetadatas of this device
     ByteBuffer buffer = readData(start, size);
     Map<String, List<ChunkMetadata>> seriesMetadata = new HashMap<>();
+    int index = 0;
+    int curSize = timeseriesMetadataMap.get(index).getDataSizeOfChunkMetaDataList();
     while (buffer.hasRemaining()) {
-      ChunkMetadata chunkMetadata = ChunkMetadata.deserializeFrom(buffer);
+      if (buffer.position() >= curSize) {
+        index++;
+        curSize += timeseriesMetadataMap.get(index).getDataSizeOfChunkMetaDataList();
+      }
+      ChunkMetadata chunkMetadata = ChunkMetadata
+          .deserializeFrom(buffer, timeseriesMetadataMap.get(index).getMeasurementId(),
+              timeseriesMetadataMap.get(index).getTSDataType());
       seriesMetadata.computeIfAbsent(chunkMetadata.getMeasurementUid(), key -> new ArrayList<>())
           .add(chunkMetadata);
     }
@@ -693,7 +699,8 @@ public class TsFileSequenceReader implements AutoCloseable {
 
   /**
    * read the chunk's header.
-   *  @param position        the file offset of this chunk's header
+   *
+   * @param position        the file offset of this chunk's header
    * @param chunkHeaderSize the size of chunk's header
    */
   private ChunkHeader readChunkHeader(long position, int chunkHeaderSize) throws IOException {
@@ -1034,7 +1041,9 @@ public class TsFileSequenceReader implements AutoCloseable {
 
     ByteBuffer buffer = readData(startOffsetOfChunkMetadataList, dataSizeOfChunkMetadataList);
     while (buffer.hasRemaining()) {
-      chunkMetadataList.add(ChunkMetadata.deserializeFrom(buffer));
+      chunkMetadataList.add(ChunkMetadata
+          .deserializeFrom(buffer, timeseriesMetaData.getMeasurementId(),
+              timeseriesMetaData.getTSDataType()));
     }
 
     VersionUtils.applyVersion(chunkMetadataList, versionInfo);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
index 524ee01..a5b02de 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
@@ -446,7 +446,7 @@ public class ReadWriteIOUtils {
    */
   public static int write(CompressionType compressionType, OutputStream outputStream)
       throws IOException {
-    short n = compressionType.serialize();
+    byte n = compressionType.serialize();
     return write(n, outputStream);
   }
 
@@ -454,7 +454,7 @@ public class ReadWriteIOUtils {
    * write compressionType to byteBuffer.
    */
   public static int write(CompressionType compressionType, ByteBuffer buffer) {
-    short n = compressionType.serialize();
+    byte n = compressionType.serialize();
     return write(n, buffer);
   }
 
@@ -462,12 +462,12 @@ public class ReadWriteIOUtils {
    * TSDataType.
    */
   public static int write(TSDataType dataType, OutputStream outputStream) throws IOException {
-    short n = dataType.serialize();
+    byte n = dataType.serialize();
     return write(n, outputStream);
   }
 
   public static int write(TSDataType dataType, ByteBuffer buffer) {
-    short n = dataType.serialize();
+    byte n = dataType.serialize();
     return write(n, buffer);
   }
 
@@ -475,12 +475,12 @@ public class ReadWriteIOUtils {
    * TSEncoding.
    */
   public static int write(TSEncoding encoding, OutputStream outputStream) throws IOException {
-    short n = encoding.serialize();
+    byte n = encoding.serialize();
     return write(n, outputStream);
   }
 
   public static int write(TSEncoding encoding, ByteBuffer buffer) {
-    short n = encoding.serialize();
+    byte n = encoding.serialize();
     return write(n, buffer);
   }
 
@@ -768,14 +768,11 @@ public class ReadWriteIOUtils {
    * <p>
    * read a int + buffer
    */
-  public static ByteBuffer readByteBufferWithSelfDescriptionLength(ByteBuffer buffer) {
+  public static byte[] readByteBufferWithSelfDescriptionLength(ByteBuffer buffer) {
     int byteLength = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
     byte[] bytes = new byte[byteLength];
     buffer.get(bytes);
-    ByteBuffer byteBuffer = ByteBuffer.allocate(byteLength);
-    byteBuffer.put(bytes);
-    byteBuffer.flip();
-    return byteBuffer;
+    return bytes;
   }
 
   /**
@@ -913,32 +910,32 @@ public class ReadWriteIOUtils {
 
   public static CompressionType readCompressionType(InputStream inputStream) throws IOException {
     byte n = readByte(inputStream);
-    return CompressionType.byteToEnum(n);
+    return CompressionType.deserialize(n);
   }
 
   public static CompressionType readCompressionType(ByteBuffer buffer) {
     byte n = buffer.get();
-    return CompressionType.byteToEnum(n);
+    return CompressionType.deserialize(n);
   }
 
   public static TSDataType readDataType(InputStream inputStream) throws IOException {
     byte n = readByte(inputStream);
-    return TSDataType.byteToEnum(n);
+    return TSDataType.deserialize(n);
   }
 
   public static TSDataType readDataType(ByteBuffer buffer) {
     byte n = buffer.get();
-    return TSDataType.byteToEnum(n);
+    return TSDataType.deserialize(n);
   }
 
   public static TSEncoding readEncoding(InputStream inputStream) throws IOException {
     byte n = readByte(inputStream);
-    return TSEncoding.byteToEnum(n);
+    return TSEncoding.deserialize(n);
   }
 
   public static TSEncoding readEncoding(ByteBuffer buffer) {
     byte n = buffer.get();
-    return TSEncoding.byteToEnum(n);
+    return TSEncoding.deserialize(n);
   }
 
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
index 11030c8..4ec4a23 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
@@ -87,11 +87,11 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
    */
   public MeasurementSchema(String measurementId, TSDataType type, TSEncoding encoding,
       CompressionType compressionType, Map<String, String> props) {
-    this.type = type.enumToByte();
+    this.type = type.serialize();
     this.measurementId = measurementId;
-    this.encoding = encoding.enumToByte();
+    this.encoding = encoding.serialize();
     this.props = props;
-    this.compressor = compressionType.enumToByte();
+    this.compressor = compressionType.serialize();
   }
 
   public MeasurementSchema(String measurementId, byte type, byte encoding,
@@ -111,11 +111,11 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
 
     measurementSchema.measurementId = ReadWriteIOUtils.readString(inputStream);
 
-    measurementSchema.type = TSDataType.deserializeToByte(ReadWriteIOUtils.readShort(inputStream));
+    measurementSchema.type = ReadWriteIOUtils.readByte(inputStream);
 
-    measurementSchema.encoding = TSEncoding.deserializeToByte(ReadWriteIOUtils.readShort(inputStream));
+    measurementSchema.encoding = ReadWriteIOUtils.readByte(inputStream);
 
-    measurementSchema.compressor = CompressionType.deserializeToByte(ReadWriteIOUtils.readShort(inputStream));
+    measurementSchema.compressor = ReadWriteIOUtils.readByte(inputStream);
 
     int size = ReadWriteIOUtils.readInt(inputStream);
     if (size > 0) {
@@ -140,11 +140,11 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
 
     measurementSchema.measurementId = ReadWriteIOUtils.readString(buffer);
 
-    measurementSchema.type = TSDataType.deserializeToByte(ReadWriteIOUtils.readShort(buffer));
+    measurementSchema.type = ReadWriteIOUtils.readByte(buffer);
 
-    measurementSchema.encoding = TSEncoding.deserializeToByte(ReadWriteIOUtils.readShort(buffer));
+    measurementSchema.encoding = ReadWriteIOUtils.readByte(buffer);
 
-    measurementSchema.compressor = CompressionType.deserializeToByte(ReadWriteIOUtils.readShort(buffer));
+    measurementSchema.compressor = ReadWriteIOUtils.readByte(buffer);
 
     int size = ReadWriteIOUtils.readInt(buffer);
     if (size > 0) {
@@ -178,7 +178,7 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
   }
 
   public TSDataType getType() {
-    return TSDataType.byteToEnum(type);
+    return TSDataType.deserialize(type);
   }
 
   public void setProps(Map<String, String> props) {
@@ -212,7 +212,7 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
   }
 
   public CompressionType getCompressor() {
-    return CompressionType.byteToEnum(compressor);
+    return CompressionType.deserialize(compressor);
   }
 
   /**
@@ -312,6 +312,6 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
   }
 
   public void setType(TSDataType type) {
-    this.type = (byte) type.serialize();
+    this.type = type.serialize();
   }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index 40a9161..3c83308 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -72,8 +72,6 @@ public class TsFileIOWriter {
 
   protected TsFileOutput out;
   protected boolean canWrite = true;
-  protected int totalChunkNum = 0;
-  protected int invalidChunkNum;
   protected File file;
 
   // current flushed Chunk
@@ -207,7 +205,6 @@ public class TsFileIOWriter {
   public void endCurrentChunk() {
     chunkMetadataList.add(currentChunkMetadata);
     currentChunkMetadata = null;
-    totalChunkNum++;
   }
 
   /**
@@ -234,8 +231,6 @@ public class TsFileIOWriter {
     TsFileMetadata tsFileMetaData = new TsFileMetadata();
     tsFileMetaData.setMetadataIndex(metadataIndex);
     tsFileMetaData.setVersionInfo(versionInfo);
-    tsFileMetaData.setTotalChunkNum(totalChunkNum);
-    tsFileMetaData.setInvalidChunkNum(invalidChunkNum);
     tsFileMetaData.setMetaOffset(metaOffset);
 
     long footerIndex = out.getPosition();
@@ -359,14 +354,6 @@ public class TsFileIOWriter {
     out.write(new byte[]{MetaMarker.CHUNK_HEADER});
   }
 
-  public int getTotalChunkNum() {
-    return totalChunkNum;
-  }
-
-  public int getInvalidChunkNum() {
-    return invalidChunkNum;
-  }
-
   public File getFile() {
     return file;
   }
@@ -400,7 +387,6 @@ public class TsFileIOWriter {
         if (!chunkValid) {
           chunkMetaDataIterator.remove();
           chunkNum--;
-          invalidChunkNum++;
         } else {
           startTimeIdxes.put(path, startTimeIdx + 1);
         }


[iotdb] 02/11: change some

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 314c38af5197aaf33079761021a28108e655cfbf
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Wed Nov 25 14:29:30 2020 +0800

    change some
---
 .../iotdb/tsfile/file/header/ChunkHeader.java      |  4 ++--
 .../iotdb/tsfile/file/metadata/TsFileMetadata.java | 27 ++--------------------
 .../metadata/statistics/BooleanStatistics.java     |  9 +++++++-
 .../file/metadata/statistics/DoubleStatistics.java | 19 +++++++++------
 .../file/metadata/statistics/FloatStatistics.java  | 19 +++++++++------
 .../file/metadata/statistics/LongStatistics.java   | 27 ++++++++++++++--------
 .../iotdb/tsfile/utils/ReadWriteIOUtils.java       |  2 +-
 .../write/writer/ForceAppendTsFileWriter.java      |  4 +---
 8 files changed, 55 insertions(+), 56 deletions(-)

diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
index c318d44..6f9beac 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
@@ -173,7 +173,7 @@ public class ChunkHeader {
    */
   public int serializeTo(OutputStream outputStream) throws IOException {
     int length = 0;
-    length += ReadWriteIOUtils.write(MetaMarker.CHUNK_HEADER, outputStream);
+    length += ReadWriteIOUtils.write(chunkType, outputStream);
     length += ReadWriteIOUtils.write(measurementID, outputStream);
     length += ReadWriteIOUtils.write(dataSize, outputStream);
     length += ReadWriteIOUtils.write(dataType, outputStream);
@@ -191,7 +191,7 @@ public class ChunkHeader {
    */
   public int serializeTo(ByteBuffer buffer) {
     int length = 0;
-    length += ReadWriteIOUtils.write(MetaMarker.CHUNK_HEADER, buffer);
+    length += ReadWriteIOUtils.write(chunkType, buffer);
     length += ReadWriteIOUtils.write(measurementID, buffer);
     length += ReadWriteIOUtils.write(dataSize, buffer);
     length += ReadWriteIOUtils.write(dataType, buffer);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadata.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadata.java
index 6098284..33c5f8d 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadata.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadata.java
@@ -29,6 +29,7 @@ import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.read.common.Path;
 import org.apache.iotdb.tsfile.utils.BloomFilter;
 import org.apache.iotdb.tsfile.utils.Pair;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 /**
@@ -36,13 +37,6 @@ import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
  */
 public class TsFileMetadata {
 
-  // fields below are IoTDB extensions and they does not affect TsFile's
-  // stand-alone functionality
-  private int totalChunkNum;
-  // invalid means a chunk has been rewritten by merge and the chunk's data is in
-  // another new chunk
-  private int invalidChunkNum;
-
   // bloom filter
   private BloomFilter bloomFilter;
 
@@ -66,12 +60,10 @@ public class TsFileMetadata {
 
     // metadataIndex
     fileMetaData.metadataIndex = MetadataIndexNode.deserializeFrom(buffer);
-    fileMetaData.totalChunkNum = ReadWriteIOUtils.readInt(buffer);
-    fileMetaData.invalidChunkNum = ReadWriteIOUtils.readInt(buffer);
 
     // versionInfo
     List<Pair<Long, Long>> versionInfo = new ArrayList<>();
-    int versionSize = ReadWriteIOUtils.readInt(buffer);
+    int versionSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
     for (int i = 0; i < versionSize; i++) {
       long versionPos = ReadWriteIOUtils.readLong(buffer);
       long version = ReadWriteIOUtils.readLong(buffer);
@@ -166,21 +158,6 @@ public class TsFileMetadata {
     return filter;
   }
 
-  public int getTotalChunkNum() {
-    return totalChunkNum;
-  }
-
-  public void setTotalChunkNum(int totalChunkNum) {
-    this.totalChunkNum = totalChunkNum;
-  }
-
-  public int getInvalidChunkNum() {
-    return invalidChunkNum;
-  }
-
-  public void setInvalidChunkNum(int invalidChunkNum) {
-    this.invalidChunkNum = invalidChunkNum;
-  }
 
   public long getMetaOffset() {
     return metaOffset;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
index e24ef46..ddbdf34 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/BooleanStatistics.java
@@ -200,6 +200,7 @@ public class BooleanStatistics extends Statistics<Boolean> {
     int byteLen = 0;
     byteLen += ReadWriteIOUtils.write(firstValue, outputStream);
     byteLen += ReadWriteIOUtils.write(lastValue, outputStream);
+    byteLen += ReadWriteIOUtils.write(sumValue, outputStream);
     return byteLen;
   }
 
@@ -207,16 +208,22 @@ public class BooleanStatistics extends Statistics<Boolean> {
   void deserialize(InputStream inputStream) throws IOException {
     this.firstValue = ReadWriteIOUtils.readBool(inputStream);
     this.lastValue = ReadWriteIOUtils.readBool(inputStream);
+    this.sumValue = ReadWriteIOUtils.readLong(inputStream);
   }
 
   @Override
   void deserialize(ByteBuffer byteBuffer) {
     this.firstValue = ReadWriteIOUtils.readBool(byteBuffer);
     this.lastValue = ReadWriteIOUtils.readBool(byteBuffer);
+    this.sumValue = ReadWriteIOUtils.readLong(byteBuffer);
   }
 
   @Override
   public String toString() {
-    return super.toString() + " [firstValue:" + firstValue + ",lastValue:" + lastValue + "]";
+    return "BooleanStatistics{" +
+        "firstValue=" + firstValue +
+        ", lastValue=" + lastValue +
+        ", sumValue=" + sumValue +
+        '}';
   }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/DoubleStatistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/DoubleStatistics.java
index ef7293c..aba11ba 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/DoubleStatistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/DoubleStatistics.java
@@ -18,14 +18,14 @@
  */
 package org.apache.iotdb.tsfile.file.metadata.statistics;
 
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.utils.BytesUtils;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import org.apache.iotdb.tsfile.exception.filter.StatisticsClassException;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.utils.BytesUtils;
+import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 public class DoubleStatistics extends Statistics<Double> {
 
@@ -143,20 +143,25 @@ public class DoubleStatistics extends Statistics<Double> {
   }
 
   @Override
-  public double getSumValue() {
+  public double getSumDoubleValue() {
     return sumValue;
   }
 
   @Override
+  public long getSumLongValue() {
+    throw new StatisticsClassException("Double statistics does not support: long sum");
+  }
+
+  @Override
   protected void mergeStatisticsValue(Statistics stats) {
     DoubleStatistics doubleStats = (DoubleStatistics) stats;
     if (this.isEmpty) {
       initializeStats(doubleStats.getMinValue(), doubleStats.getMaxValue(), doubleStats.getFirstValue(),
-          doubleStats.getLastValue(), doubleStats.getSumValue());
+          doubleStats.getLastValue(), doubleStats.sumValue);
       isEmpty = false;
     } else {
       updateStats(doubleStats.getMinValue(), doubleStats.getMaxValue(), doubleStats.getFirstValue(),
-          doubleStats.getLastValue(), doubleStats.getSumValue(), stats.getStartTime(), stats.getEndTime());
+          doubleStats.getLastValue(), doubleStats.sumValue, stats.getStartTime(), stats.getEndTime());
     }
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/FloatStatistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/FloatStatistics.java
index 3080fd7..106ff32 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/FloatStatistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/FloatStatistics.java
@@ -18,14 +18,14 @@
  */
 package org.apache.iotdb.tsfile.file.metadata.statistics;
 
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.utils.BytesUtils;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import org.apache.iotdb.tsfile.exception.filter.StatisticsClassException;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.utils.BytesUtils;
+import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 /**
  * Statistics for float type.
@@ -138,20 +138,25 @@ public class FloatStatistics extends Statistics<Float> {
   }
 
   @Override
-  public double getSumValue() {
+  public double getSumDoubleValue() {
     return sumValue;
   }
 
   @Override
+  public long getSumLongValue() {
+    throw new StatisticsClassException("Float statistics does not support: long sum");
+  }
+
+  @Override
   protected void mergeStatisticsValue(Statistics stats) {
     FloatStatistics floatStats = (FloatStatistics) stats;
     if (isEmpty) {
       initializeStats(floatStats.getMinValue(), floatStats.getMaxValue(), floatStats.getFirstValue(),
-          floatStats.getLastValue(), floatStats.getSumValue());
+          floatStats.getLastValue(), floatStats.sumValue);
       isEmpty = false;
     } else {
       updateStats(floatStats.getMinValue(), floatStats.getMaxValue(), floatStats.getFirstValue(),
-          floatStats.getLastValue(), floatStats.getSumValue(), stats.getStartTime(), stats.getEndTime());
+          floatStats.getLastValue(), floatStats.sumValue, stats.getStartTime(), stats.getEndTime());
     }
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/LongStatistics.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/LongStatistics.java
index e4958d3..60cc881 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/LongStatistics.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/statistics/LongStatistics.java
@@ -18,14 +18,14 @@
  */
 package org.apache.iotdb.tsfile.file.metadata.statistics;
 
-import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
-import org.apache.iotdb.tsfile.utils.BytesUtils;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import org.apache.iotdb.tsfile.exception.filter.StatisticsClassException;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.utils.BytesUtils;
+import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 public class LongStatistics extends Statistics<Long> {
 
@@ -67,7 +67,8 @@ public class LongStatistics extends Statistics<Long> {
     this.lastValue = lastValue;
   }
 
-  private void updateStats(long minValue, long maxValue, long firstValue, long lastValue, double sumValue, long startTime, long endTime) {
+  private void updateStats(long minValue, long maxValue, long firstValue, long lastValue,
+      double sumValue, long startTime, long endTime) {
     if (minValue < this.minValue) {
       this.minValue = minValue;
     }
@@ -113,11 +114,16 @@ public class LongStatistics extends Statistics<Long> {
   }
 
   @Override
-  public double getSumValue() {
+  public double getSumDoubleValue() {
     return sumValue;
   }
 
   @Override
+  public long getSumLongValue() {
+    throw new StatisticsClassException("Long statistics does not support: long sum");
+  }
+
+  @Override
   void updateStats(long value) {
     if (isEmpty) {
       initializeStats(value, value, value, value, value);
@@ -154,11 +160,11 @@ public class LongStatistics extends Statistics<Long> {
     LongStatistics longStats = (LongStatistics) stats;
     if (isEmpty) {
       initializeStats(longStats.getMinValue(), longStats.getMaxValue(), longStats.getFirstValue(),
-          longStats.getLastValue(), longStats.getSumValue());
+          longStats.getLastValue(), longStats.sumValue);
       isEmpty = false;
     } else {
-      updateStats(longStats.getMinValue(), longStats.getMaxValue(), longStats.getFirstValue(), longStats.getLastValue(),
-          longStats.getSumValue(), stats.getStartTime(), stats.getEndTime());
+      updateStats(longStats.getMinValue(), longStats.getMaxValue(), longStats.getFirstValue(),
+          longStats.getLastValue(), longStats.sumValue, stats.getStartTime(), stats.getEndTime());
     }
 
   }
@@ -244,7 +250,8 @@ public class LongStatistics extends Statistics<Long> {
 
   @Override
   public String toString() {
-    return super.toString() + " [minValue:" + minValue + ",maxValue:" + maxValue + ",firstValue:" + firstValue +
+    return super.toString() + " [minValue:" + minValue + ",maxValue:" + maxValue + ",firstValue:"
+        + firstValue +
         ",lastValue:" + lastValue + ",sumValue:" + sumValue + "]";
   }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
index 45f00ae..524ee01 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
@@ -769,7 +769,7 @@ public class ReadWriteIOUtils {
    * read a int + buffer
    */
   public static ByteBuffer readByteBufferWithSelfDescriptionLength(ByteBuffer buffer) {
-    int byteLength = readInt(buffer);
+    int byteLength = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
     byte[] bytes = new byte[byteLength];
     buffer.get(bytes);
     ByteBuffer byteBuffer = ByteBuffer.allocate(byteLength);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/ForceAppendTsFileWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/ForceAppendTsFileWriter.java
index 9dfccf8..6625f68 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/ForceAppendTsFileWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/ForceAppendTsFileWriter.java
@@ -66,9 +66,7 @@ public class ForceAppendTsFileWriter extends TsFileIOWriter {
 
       canWrite = true;
       versionInfo = tsFileMetadata.getVersionInfo();
-      totalChunkNum = tsFileMetadata.getTotalChunkNum();
-      invalidChunkNum = tsFileMetadata.getInvalidChunkNum();
-      
+
       List<String> devices = reader.getAllDevices();
       for (String device : devices) {
         List<ChunkMetadata> chunkMetadataList = new ArrayList<>();


[iotdb] 07/11: debug

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit e84b33e88c80e517c376d2598c6751d1cb211451
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Fri Nov 27 09:53:33 2020 +0800

    debug
---
 .../iotdb/tsfile/file/header/ChunkGroupHeader.java      | 10 ++++------
 .../apache/iotdb/tsfile/file/header/ChunkHeader.java    | 17 +++++++----------
 .../iotdb/tsfile/file/metadata/TimeseriesMetadata.java  |  4 ++--
 .../org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java |  2 +-
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java       |  3 ++-
 5 files changed, 16 insertions(+), 20 deletions(-)

diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkGroupHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkGroupHeader.java
index 4b0a424..429a9b4 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkGroupHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkGroupHeader.java
@@ -25,6 +25,7 @@ import java.io.OutputStream;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.file.MetaMarker;
 import org.apache.iotdb.tsfile.read.reader.TsFileInput;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 public class ChunkGroupHeader {
@@ -43,15 +44,12 @@ public class ChunkGroupHeader {
    */
   public ChunkGroupHeader(String deviceID) {
     this.deviceID = deviceID;
-    this.serializedSize = Byte.BYTES + Integer.BYTES + deviceID.getBytes(TSFileConfig.STRING_CHARSET).length;
+    this.serializedSize = getSerializedSize(deviceID);
   }
 
   public static int getSerializedSize(String deviceID) {
-    return Byte.BYTES + Integer.BYTES + getSerializedSize(deviceID.length());
-  }
-
-  private static int getSerializedSize(int deviceIdLength) {
-    return deviceIdLength + Long.BYTES + Integer.BYTES;
+    int length = deviceID.getBytes(TSFileConfig.STRING_CHARSET).length;
+    return Byte.BYTES + ReadWriteForEncodingUtils.varIntSize(length) + length;
   }
 
   /**
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
index 6f9beac..76c1813 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkHeader.java
@@ -111,7 +111,7 @@ public class ChunkHeader {
       throws IOException {
     // read measurementID
     String measurementID = ReadWriteIOUtils.readVarIntString(inputStream);
-    int dataSize = ReadWriteForEncodingUtils.readVarInt(inputStream);
+    int dataSize = ReadWriteForEncodingUtils.readUnsignedVarInt(inputStream);
     TSDataType dataType = ReadWriteIOUtils.readDataType(inputStream);
     CompressionType type = ReadWriteIOUtils.readCompressionType(inputStream);
     TSEncoding encoding = ReadWriteIOUtils.readEncoding(inputStream);
@@ -136,9 +136,8 @@ public class ChunkHeader {
 
     byte chunkType = buffer.get();
     // read measurementID
-    int size = ReadWriteForEncodingUtils.readVarInt(buffer);
-    String measurementID = ReadWriteIOUtils.readStringWithLength(buffer, size);
-    int dataSize = ReadWriteForEncodingUtils.readVarInt(buffer);
+    String measurementID = ReadWriteIOUtils.readVarIntString(buffer);
+    int dataSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
     TSDataType dataType = ReadWriteIOUtils.readDataType(buffer);
     CompressionType type = ReadWriteIOUtils.readCompressionType(buffer);
     TSEncoding encoding = ReadWriteIOUtils.readEncoding(buffer);
@@ -174,10 +173,9 @@ public class ChunkHeader {
   public int serializeTo(OutputStream outputStream) throws IOException {
     int length = 0;
     length += ReadWriteIOUtils.write(chunkType, outputStream);
-    length += ReadWriteIOUtils.write(measurementID, outputStream);
-    length += ReadWriteIOUtils.write(dataSize, outputStream);
+    length += ReadWriteIOUtils.writeVar(measurementID, outputStream);
+    length += ReadWriteForEncodingUtils.writeUnsignedVarInt(dataSize, outputStream);
     length += ReadWriteIOUtils.write(dataType, outputStream);
-    length += ReadWriteIOUtils.write(numOfPages, outputStream);
     length += ReadWriteIOUtils.write(compressionType, outputStream);
     length += ReadWriteIOUtils.write(encodingType, outputStream);
     return length;
@@ -192,10 +190,9 @@ public class ChunkHeader {
   public int serializeTo(ByteBuffer buffer) {
     int length = 0;
     length += ReadWriteIOUtils.write(chunkType, buffer);
-    length += ReadWriteIOUtils.write(measurementID, buffer);
-    length += ReadWriteIOUtils.write(dataSize, buffer);
+    length += ReadWriteIOUtils.writeVar(measurementID, buffer);
+    length += ReadWriteForEncodingUtils.writeUnsignedVarInt(dataSize, buffer);
     length += ReadWriteIOUtils.write(dataType, buffer);
-    length += ReadWriteIOUtils.write(numOfPages, buffer);
     length += ReadWriteIOUtils.write(compressionType, buffer);
     length += ReadWriteIOUtils.write(encodingType, buffer);
     return length;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
index 99b89e0..4f93f17 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
@@ -83,7 +83,7 @@ public class TimeseriesMetadata implements Accountable {
   public static TimeseriesMetadata deserializeFrom(ByteBuffer buffer) {
     TimeseriesMetadata timeseriesMetaData = new TimeseriesMetadata();
     timeseriesMetaData.setTimeSeriesMetadataType(ReadWriteIOUtils.readByte(buffer));
-    timeseriesMetaData.setMeasurementId(ReadWriteIOUtils.readString(buffer));
+    timeseriesMetaData.setMeasurementId(ReadWriteIOUtils.readVarIntString(buffer));
     timeseriesMetaData.setTSDataType(ReadWriteIOUtils.readDataType(buffer));
     timeseriesMetaData.setOffsetOfChunkMetaDataList(ReadWriteIOUtils.readLong(buffer));
     timeseriesMetaData
@@ -102,7 +102,7 @@ public class TimeseriesMetadata implements Accountable {
   public int serializeTo(OutputStream outputStream) throws IOException {
     int byteLen = 0;
     byteLen += ReadWriteIOUtils.write(timeSeriesMetadataType, outputStream);
-    byteLen += ReadWriteIOUtils.write(measurementId, outputStream);
+    byteLen += ReadWriteIOUtils.writeVar(measurementId, outputStream);
     byteLen += ReadWriteIOUtils.write(dataType, outputStream);
     byteLen += ReadWriteIOUtils.write(startOffsetOfChunkMetaDataList, outputStream);
     byteLen += ReadWriteForEncodingUtils
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
index a5b02de..31eb8c7 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
@@ -365,7 +365,7 @@ public class ReadWriteIOUtils {
     }
 
     byte[] bytes = s.getBytes();
-    len += write(bytes.length, outputStream);
+    len += ReadWriteForEncodingUtils.writeVarInt(bytes.length, outputStream);
     outputStream.write(bytes);
     len += bytes.length;
     return len;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index c7eefe0..292e620 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -48,6 +48,7 @@ import org.apache.iotdb.tsfile.read.common.Path;
 import org.apache.iotdb.tsfile.utils.BytesUtils;
 import org.apache.iotdb.tsfile.utils.Pair;
 import org.apache.iotdb.tsfile.utils.PublicBAOS;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 import org.apache.iotdb.tsfile.utils.VersionUtils;
 import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
@@ -252,7 +253,7 @@ public class TsFileIOWriter {
     }
 
     // write TsFileMetaData size
-    ReadWriteIOUtils.write(size, out.wrapAsStream());// write the size of the file metadata.
+    ReadWriteForEncodingUtils.writeUnsignedVarInt(size, out.wrapAsStream());// write the size of the file metadata.
 
     // write magic string
     out.write(magicStringBytes);


[iotdb] 11/11: fix bug

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 0b153b20404515f69889bb77fde579f5804dd6e4
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Wed Dec 2 16:03:52 2020 +0800

    fix bug
---
 .../file/metadata/MetadataIndexConstructor.java    |  8 +++---
 .../tsfile/file/metadata/MetadataIndexNode.java    |  6 ++---
 .../iotdb/tsfile/read/TsFileSequenceReader.java    | 29 ++++++++++++++++++----
 .../tsfile/utils/ReadWriteForEncodingUtils.java    |  5 ++--
 .../iotdb/tsfile/utils/ReadWriteIOUtils.java       |  4 +--
 .../tsfile/write/schema/MeasurementSchema.java     | 12 ++++-----
 .../iotdb/tsfile/read/GetAllDevicesTest.java       | 14 +++--------
 .../tsfile/read/TsFileSequenceReaderTest.java      | 13 ++++++----
 .../iotdb/tsfile/write/TsFileIOWriterTest.java     |  7 +++---
 .../iotdb/tsfile/write/writer/PageWriterTest.java  |  4 +--
 .../write/writer/RestorableTsFileIOWriterTest.java |  5 ++--
 11 files changed, 63 insertions(+), 44 deletions(-)

diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexConstructor.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexConstructor.java
index d3070be..df0aa4d 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexConstructor.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexConstructor.java
@@ -26,14 +26,14 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Queue;
 import java.util.TreeMap;
+import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.file.metadata.enums.MetadataIndexNodeType;
 import org.apache.iotdb.tsfile.write.writer.TsFileOutput;
 
 public class MetadataIndexConstructor {
 
-  private static final int MAX_DEGREE_OF_INDEX_NODE = TSFileDescriptor.getInstance().getConfig()
-      .getMaxDegreeOfIndexNode();
+  private static final TSFileConfig config = TSFileDescriptor.getInstance().getConfig();
 
   private MetadataIndexConstructor() {
     throw new IllegalStateException("Utility class");
@@ -62,7 +62,7 @@ public class MetadataIndexConstructor {
       for (int i = 0; i < entry.getValue().size(); i++) {
         timeseriesMetadata = entry.getValue().get(i);
         // when constructing from leaf node, every "degree number of nodes" are related to an entry
-        if (i % MAX_DEGREE_OF_INDEX_NODE == 0) {
+        if (i % config.getMaxDegreeOfIndexNode() == 0) {
           if (currentIndexNode.isFull()) {
             addCurrentIndexNodeToQueue(currentIndexNode, measurementMetadataIndexQueue, out);
             currentIndexNode = new MetadataIndexNode(MetadataIndexNodeType.LEAF_MEASUREMENT);
@@ -78,7 +78,7 @@ public class MetadataIndexConstructor {
     }
 
     // if not exceed the max child nodes num, ignore the device index and directly point to the measurement
-    if (deviceMetadataIndexMap.size() <= MAX_DEGREE_OF_INDEX_NODE) {
+    if (deviceMetadataIndexMap.size() <= config.getMaxDegreeOfIndexNode()) {
       MetadataIndexNode metadataIndexNode = new MetadataIndexNode(
           MetadataIndexNodeType.INTERNAL_MEASUREMENT);
       for (Map.Entry<String, MetadataIndexNode> entry : deviceMetadataIndexMap.entrySet()) {
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexNode.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexNode.java
index 0600fd4..f3ff04f 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexNode.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexNode.java
@@ -24,6 +24,7 @@ import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
+import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.file.metadata.enums.MetadataIndexNodeType;
 import org.apache.iotdb.tsfile.utils.Pair;
@@ -32,8 +33,7 @@ import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 public class MetadataIndexNode {
 
-  private static final int MAX_DEGREE_OF_INDEX_NODE = TSFileDescriptor.getInstance().getConfig()
-      .getMaxDegreeOfIndexNode();
+  private static final TSFileConfig config = TSFileDescriptor.getInstance().getConfig();
   private List<MetadataIndexEntry> children;
   private long endOffset;
 
@@ -76,7 +76,7 @@ public class MetadataIndexNode {
   }
 
   boolean isFull() {
-    return children.size() == MAX_DEGREE_OF_INDEX_NODE;
+    return children.size() == config.getMaxDegreeOfIndexNode();
   }
 
   MetadataIndexEntry peek() {
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index c3d2c5a..d15a194 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -209,8 +209,14 @@ public class TsFileSequenceReader implements AutoCloseable {
    * whether the file is a complete TsFile: only if the head magic and tail magic string exists.
    */
   public boolean isComplete() throws IOException {
-    return tsFileInput.size() >= TSFileConfig.MAGIC_STRING.getBytes().length * 2 + Byte.BYTES
-        && (readTailMagic().equals(readHeadMagic()));
+    long size = tsFileInput.size();
+    if (size >= TSFileConfig.MAGIC_STRING.getBytes().length * 2 + Byte.BYTES) {
+      String tailMagic = readTailMagic();
+      String headMagic = readHeadMagic();
+      return tailMagic.equals(headMagic);
+    } else {
+      return false;
+    }
   }
 
   /**
@@ -999,6 +1005,9 @@ public class TsFileSequenceReader implements AutoCloseable {
             chunkMetadataList.add(currentChunk);
             break;
           case MetaMarker.CHUNK_GROUP_HEADER:
+            // if there is something wrong with the ChunkGroup Header, we will drop this ChunkGroup
+            // because we can not guarantee the correctness of the deviceId.
+            truncatedSize = this.position() - 1;
             if (lastDeviceId != null) {
               // schema of last chunk group
               if (newSchema != null) {
@@ -1011,15 +1020,13 @@ public class TsFileSequenceReader implements AutoCloseable {
               // last chunk group Metadata
               chunkGroupMetadataList.add(new ChunkGroupMetadata(lastDeviceId, chunkMetadataList));
             }
-            // if there is something wrong with the ChunkGroup Footer, we will drop this ChunkGroup
-            // because we can not guarantee the correctness of the deviceId.
-            truncatedSize = this.position() - 1;
             // this is a chunk group
             chunkMetadataList = new ArrayList<>();
             ChunkGroupHeader chunkGroupHeader = this.readChunkGroupHeader();
             lastDeviceId = chunkGroupHeader.getDeviceID();
             break;
           case MetaMarker.VERSION:
+            truncatedSize = this.position() - 1;
             if (lastDeviceId != null) {
               // schema of last chunk group
               if (newSchema != null) {
@@ -1046,6 +1053,18 @@ public class TsFileSequenceReader implements AutoCloseable {
       }
       // now we read the tail of the data section, so we are sure that the last
       // ChunkGroupFooter is complete.
+      if (lastDeviceId != null) {
+        // schema of last chunk group
+        if (newSchema != null) {
+          for (MeasurementSchema tsSchema : measurementSchemaList) {
+            newSchema
+                .putIfAbsent(new Path(lastDeviceId, tsSchema.getMeasurementId()), tsSchema);
+          }
+        }
+        measurementSchemaList = new ArrayList<>();
+        // last chunk group Metadata
+        chunkGroupMetadataList.add(new ChunkGroupMetadata(lastDeviceId, chunkMetadataList));
+      }
       truncatedSize = this.position() - 1;
     } catch (Exception e) {
       logger.info("TsFile {} self-check cannot proceed at position {} " + "recovered, because : {}",
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteForEncodingUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteForEncodingUtils.java
index 578ab4c..d3783df 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteForEncodingUtils.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteForEncodingUtils.java
@@ -98,10 +98,11 @@ public class ReadWriteForEncodingUtils {
   public static int readUnsignedVarInt(InputStream in) throws IOException {
     int value = 0;
     int i = 0;
-    int b;
-    while (((b = in.read()) & 0x80) != 0) {
+    int b = in.read();
+    while (b != -1 && (b & 0x80) != 0) {
       value |= (b & 0x7F) << i;
       i += 7;
+      b = in.read();
     }
     return value | (b << i);
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
index ad251e5..17c0935 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/utils/ReadWriteIOUtils.java
@@ -390,11 +390,11 @@ public class ReadWriteIOUtils {
 
   public static int writeVar(String s, ByteBuffer buffer) {
     if (s == null) {
-      return write(-1, buffer);
+      return ReadWriteForEncodingUtils.writeVarInt(-1, buffer);
     }
     int len = 0;
     byte[] bytes = s.getBytes();
-    len += write(bytes.length, buffer);
+    len += ReadWriteForEncodingUtils.writeVarInt(bytes.length, buffer);
     buffer.put(bytes);
     len += bytes.length;
     return len;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
index 18127d2..7a6ca04 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
@@ -223,11 +223,11 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
 
     byteLen += ReadWriteIOUtils.write(measurementId, outputStream);
 
-    byteLen += ReadWriteIOUtils.write((short) type, outputStream);
+    byteLen += ReadWriteIOUtils.write(type, outputStream);
 
-    byteLen += ReadWriteIOUtils.write((short) encoding, outputStream);
+    byteLen += ReadWriteIOUtils.write(encoding, outputStream);
 
-    byteLen += ReadWriteIOUtils.write((short) compressor, outputStream);
+    byteLen += ReadWriteIOUtils.write(compressor, outputStream);
 
     if (props == null) {
       byteLen += ReadWriteIOUtils.write(0, outputStream);
@@ -250,11 +250,11 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
 
     byteLen += ReadWriteIOUtils.write(measurementId, buffer);
 
-    byteLen += ReadWriteIOUtils.write((short) type, buffer);
+    byteLen += ReadWriteIOUtils.write(type, buffer);
 
-    byteLen += ReadWriteIOUtils.write((short) encoding, buffer);
+    byteLen += ReadWriteIOUtils.write(encoding, buffer);
 
-    byteLen += ReadWriteIOUtils.write((short) compressor, buffer);
+    byteLen += ReadWriteIOUtils.write(compressor, buffer);
 
     if (props == null) {
       byteLen += ReadWriteIOUtils.write(0, buffer);
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/GetAllDevicesTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/GetAllDevicesTest.java
index 05add5c..61c6465 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/GetAllDevicesTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/GetAllDevicesTest.java
@@ -42,7 +42,7 @@ public class GetAllDevicesTest {
   }
 
   @After
-  public void after() throws IOException {
+  public void after() {
     FileGenerator.after();
     conf.setMaxDegreeOfIndexNode(maxDegreeOfIndexNode);
   }
@@ -70,19 +70,13 @@ public class GetAllDevicesTest {
   public void testGetAllDevices(int deviceNum, int measurementNum) throws IOException {
     FileGenerator.generateFile(10000, deviceNum, measurementNum);
     try (TsFileSequenceReader fileReader = new TsFileSequenceReader(FILE_PATH)) {
-      ReadOnlyTsFile tsFile = new ReadOnlyTsFile(fileReader);
-  
-      // test
-      try (TsFileSequenceReader reader = new TsFileSequenceReader(FILE_PATH)) {
-        List<String> devices = reader.getAllDevices();
+
+        List<String> devices = fileReader.getAllDevices();
         Assert.assertEquals(deviceNum, devices.size());
         for (int i = 0; i < deviceNum; i++) {
           Assert.assertTrue(devices.contains("d" + i));
         }
-      }
-  
-      // after
-      tsFile.close();
+
       FileGenerator.after();
     }
   }
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
index 4e8b7e0..1e67454 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
@@ -20,6 +20,7 @@
 package org.apache.iotdb.tsfile.read;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -74,19 +75,21 @@ public class TsFileSequenceReaderTest {
         case MetaMarker.CHUNK_HEADER:
         case MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER:
           ChunkHeader header = reader.readChunkHeader(marker);
-          for (int j = 0; j < header.getNumOfPages(); j++) {
+          int dataSize = header.getDataSize();
+          while (dataSize > 0) {
             PageHeader pageHeader = reader.readPageHeader(header.getDataType(),
                 header.getChunkType() == MetaMarker.CHUNK_HEADER);
-            reader.readPage(pageHeader, header.getCompressionType());
+            ByteBuffer pageData = reader.readPage(pageHeader, header.getCompressionType());
+            dataSize -= pageHeader.getSerializedPageSize();
           }
           break;
         case MetaMarker.CHUNK_GROUP_HEADER:
-          ChunkGroupHeader footer = reader.readChunkGroupHeader();
+          ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupHeader();
           long endOffset = reader.position();
           Pair<Long, Long> pair = new Pair<>(startOffset, endOffset);
-          deviceChunkGroupMetadataOffsets.putIfAbsent(footer.getDeviceID(), new ArrayList<>());
+          deviceChunkGroupMetadataOffsets.putIfAbsent(chunkGroupHeader.getDeviceID(), new ArrayList<>());
           List<Pair<Long, Long>> metadatas = deviceChunkGroupMetadataOffsets
-              .get(footer.getDeviceID());
+              .get(chunkGroupHeader.getDeviceID());
           metadatas.add(pair);
           startOffset = endOffset;
           break;
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
index 675a77c..a68ba9b 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
@@ -87,13 +87,14 @@ public class TsFileIOWriterTest {
     Assert.assertEquals(TSFileConfig.VERSION_NUMBER, reader.readVersionNumber());
     Assert.assertEquals(TSFileConfig.MAGIC_STRING, reader.readTailMagic());
 
+    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + 1);
+
     // chunk group header
     Assert.assertEquals(MetaMarker.CHUNK_GROUP_HEADER, reader.readMarker());
-    ChunkGroupHeader footer = reader.readChunkGroupHeader();
-    Assert.assertEquals(deviceId, footer.getDeviceID());
+    ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupHeader();
+    Assert.assertEquals(deviceId, chunkGroupHeader.getDeviceID());
 
     // chunk header
-    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + 1);
     Assert.assertEquals(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER, reader.readMarker());
     ChunkHeader header = reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
     Assert.assertEquals(TimeSeriesMetadataTest.measurementUID, header.getMeasurementID());
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/PageWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/PageWriterTest.java
index 3253495..df3a57a 100755
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/PageWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/PageWriterTest.java
@@ -46,7 +46,7 @@ public class PageWriterTest {
     int timeCount = 0;
     try {
       writer.write(timeCount++, value);
-      assertEquals(12, writer.estimateMaxMemSize());
+      assertEquals(9, writer.estimateMaxMemSize());
       ByteBuffer buffer1 = writer.getUncompressedBytes();
       ByteBuffer buffer = ByteBuffer.wrap(buffer1.array());
       writer.reset(new MeasurementSchema("s0", TSDataType.INT32, TSEncoding.RLE));
@@ -165,7 +165,7 @@ public class PageWriterTest {
     int timeCount = 0;
     try {
       writer.write(timeCount++, new Binary(value));
-      assertEquals(26, writer.estimateMaxMemSize());
+      assertEquals(23, writer.estimateMaxMemSize());
       ByteBuffer buffer1 = writer.getUncompressedBytes();
       ByteBuffer buffer = ByteBuffer.wrap(buffer1.array());
       writer.reset(new MeasurementSchema("s0", TSDataType.INT64, TSEncoding.RLE));
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriterTest.java
index 68c46dc..0a59614 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/writer/RestorableTsFileIOWriterTest.java
@@ -85,8 +85,7 @@ public class RestorableTsFileIOWriterTest {
     RestorableTsFileIOWriter rWriter = new RestorableTsFileIOWriter(file);
     writer = new TsFileWriter(rWriter);
     writer.close();
-    assertEquals(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
-        .getBytes().length, rWriter.getTruncatedSize());
+    assertEquals(TSFileConfig.MAGIC_STRING.getBytes().length + 1, rWriter.getTruncatedSize());
 
     rWriter = new RestorableTsFileIOWriter(file);
     assertEquals(TsFileCheckStatus.COMPLETE_FILE, rWriter.getTruncatedSize());
@@ -128,6 +127,7 @@ public class RestorableTsFileIOWriterTest {
   public void testOnlyOneChunkHeader() throws Exception {
     File file = new File(FILE_NAME);
     TsFileWriter writer = new TsFileWriter(file);
+    writer.getIOWriter().startChunkGroup("root.sg1.d1");
     writer.getIOWriter()
         .startFlushChunk(new MeasurementSchema("s1", TSDataType.FLOAT, TSEncoding.PLAIN),
             CompressionType.SNAPPY, TSDataType.FLOAT, TSEncoding.PLAIN, new FloatStatistics(), 100,
@@ -181,6 +181,7 @@ public class RestorableTsFileIOWriterTest {
     writer.write(new TSRecord(2, "d1").addTuple(new FloatDataPoint("s1", 5))
         .addTuple(new FloatDataPoint("s2", 4)));
     writer.flushAllChunkGroups();
+    writer.writeVersion(0);
     writer.getIOWriter().close();
     RestorableTsFileIOWriter rWriter = new RestorableTsFileIOWriter(file);
     writer = new TsFileWriter(rWriter);


[iotdb] 04/11: next is to delete Edian type

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackietien pushed a commit to branch NewTsFile
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 6b8a0608187ca96c8a8df481cbcd836c32f7a3c2
Author: JackieTien97 <Ja...@foxmail.com>
AuthorDate: Thu Nov 26 09:50:13 2020 +0800

    next is to delete Edian type
---
 .../apache/iotdb/tsfile/TsFileSequenceRead.java    |  5 ++-
 .../apache/iotdb/db/tools/TsFileSketchTool.java    | 11 +------
 .../tsfile/encoding/encoder/PlainEncoder.java      |  1 +
 .../file/{footer => header}/ChunkGroupHeader.java  |  2 +-
 .../iotdb/tsfile/file/header/package-info.java     | 30 ------------------
 .../iotdb/tsfile/file/metadata/ChunkMetadata.java  | 22 +++++++------
 .../tsfile/file/metadata/TimeseriesMetadata.java   | 23 ++++++++++++--
 .../tsfile/file/metadata/enums/TSEncoding.java     | 36 ----------------------
 .../iotdb/tsfile/read/TsFileSequenceReader.java    |  9 ++----
 .../tsfile/write/schema/MeasurementSchema.java     |  2 +-
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java  | 22 +++++++------
 .../tsfile/read/TsFileSequenceReaderTest.java      |  5 ++-
 .../iotdb/tsfile/write/TsFileIOWriterTest.java     | 19 ++++++------
 13 files changed, 66 insertions(+), 121 deletions(-)

diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
index 818ac3b..f837b4b 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/TsFileSequenceRead.java
@@ -26,7 +26,7 @@ import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.encoding.decoder.Decoder;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
+import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
@@ -55,8 +55,7 @@ public class TsFileSequenceRead {
       // first SeriesChunks (headers and data) in one ChunkGroup, then the CHUNK_GROUP_FOOTER
       // Because we do not know how many chunks a ChunkGroup may have, we should read one byte (the marker) ahead and
       // judge accordingly.
-      reader.position((long) TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
-              .getBytes().length);
+      reader.position((long) TSFileConfig.MAGIC_STRING.getBytes().length + 1);
       System.out.println("[Chunk Group]");
       System.out.println("position: " + reader.position());
       byte marker;
diff --git a/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java b/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
index ca1b26c..779fb56 100644
--- a/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
+++ b/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
@@ -28,7 +28,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
+import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkGroupMetadata;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.MetadataIndexEntry;
@@ -109,10 +109,6 @@ public class TsFileSketchTool {
           printlnBoth(pw, String.format("%20s", "") + "|\t\t[marker] 0");
           printlnBoth(pw,
                   String.format("%20s", "") + "|\t\t[deviceID] " + chunkGroupHeader.getDeviceID());
-          printlnBoth(pw,
-                  String.format("%20s", "") + "|\t\t[dataSize] " + chunkGroupHeader.getDataSize());
-          printlnBoth(pw, String.format("%20s", "") + "|\t\t[num of chunks] " + chunkGroupHeader
-                  .getNumberOfChunks());
           printlnBoth(pw, str1.toString() + "\t[Chunk Group] of "
                   + chunkGroupMetadata.getDevice() + " ends");
           // versionInfo begins if there is a versionInfo
@@ -166,11 +162,6 @@ public class TsFileSketchTool {
         printlnBoth(pw,
                 String.format("%20s", "") + "|\t\t" + tsFileMetaData.getMetadataIndex().getChildren()
                         .size() + " key&TsMetadataIndex");
-        printlnBoth(pw,
-                String.format("%20s", "") + "|\t\t[totalChunkNum] " + tsFileMetaData.getTotalChunkNum());
-        printlnBoth(pw,
-                String.format("%20s", "") + "|\t\t[invalidChunkNum] " + tsFileMetaData
-                        .getInvalidChunkNum());
 
         // bloom filter
         BloomFilter bloomFilter = tsFileMetaData.getBloomFilter();
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
index d98d9c2..d30bcc8 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/encoding/encoder/PlainEncoder.java
@@ -23,6 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.math.BigDecimal;
 
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupHeader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkGroupHeader.java
similarity index 98%
rename from tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupHeader.java
rename to tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkGroupHeader.java
index 66afbd8..4b0a424 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/footer/ChunkGroupHeader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/ChunkGroupHeader.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.apache.iotdb.tsfile.file.footer;
+package org.apache.iotdb.tsfile.file.header;
 
 import java.io.IOException;
 import java.io.InputStream;
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/package-info.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/package-info.java
deleted file mode 100644
index 9c1701a..0000000
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/header/package-info.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iotdb.tsfile.file.header;
-
-/**
- * CHUNK_GROUP_FOOTER and CHUNK_HEADER are used for parsing file.
- *
- * ChunkGroupMetadata and ChunkMetadata are used for locating the positions of
- * ChunkGroup (footer) and chunk (header),filtering data quickly, and thereby
- * they have statistics information.
- *
- * However, because Page has only the header structure, therefore, PageHeader
- * has the both two functions.
- */
\ No newline at end of file
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/ChunkMetadata.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/ChunkMetadata.java
index 6f494b3..083c9e1 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/ChunkMetadata.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/ChunkMetadata.java
@@ -142,10 +142,12 @@ public class ChunkMetadata implements Accountable {
    * @return length
    * @throws IOException IOException
    */
-  public int serializeTo(OutputStream outputStream) throws IOException {
+  public int serializeTo(OutputStream outputStream, boolean serializeStatistic) throws IOException {
     int byteLen = 0;
     byteLen += ReadWriteIOUtils.write(offsetOfChunkHeader, outputStream);
-    byteLen += statistics.serialize(outputStream);
+    if (serializeStatistic) {
+      byteLen += statistics.serialize(outputStream);
+    }
     return byteLen;
   }
 
@@ -153,19 +155,19 @@ public class ChunkMetadata implements Accountable {
    * deserialize from ByteBuffer.
    *
    * @param buffer          ByteBuffer
-   * @param measurementUid  measurementUid of this chunk metadata
-   * @param tsDataType      data type of this chunk metadata
    * @return ChunkMetaData object
    */
-  public static ChunkMetadata deserializeFrom(ByteBuffer buffer, String measurementUid,
-      TSDataType tsDataType) {
+  public static ChunkMetadata deserializeFrom(ByteBuffer buffer, TimeseriesMetadata timeseriesMetadata) {
     ChunkMetadata chunkMetaData = new ChunkMetadata();
 
-    chunkMetaData.measurementUid = measurementUid;
-    chunkMetaData.tsDataType = tsDataType;
+    chunkMetaData.measurementUid = timeseriesMetadata.getMeasurementId();
+    chunkMetaData.tsDataType = timeseriesMetadata.getTSDataType();
     chunkMetaData.offsetOfChunkHeader = ReadWriteIOUtils.readLong(buffer);
-    chunkMetaData.statistics = Statistics.deserialize(buffer, chunkMetaData.tsDataType);
-
+    if (timeseriesMetadata.getTimeSeriesMetadataType() != 0) {
+      chunkMetaData.statistics = Statistics.deserialize(buffer, chunkMetaData.tsDataType);
+    } else {
+      chunkMetaData.statistics = timeseriesMetadata.getStatistics();
+    }
     return chunkMetaData;
   }
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
index 6188e1d..99b89e0 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TimeseriesMetadata.java
@@ -32,6 +32,12 @@ import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 public class TimeseriesMetadata implements Accountable {
 
+  /**
+   * 0 means this time series has only one chunk, no need to save the statistic again in chunk metadata
+   * 1 means this time series has more than one chunk, should save the statistic again in chunk metadata
+   */
+  private byte timeSeriesMetadataType;
+
   private long startOffsetOfChunkMetaDataList;
   private int chunkMetaDataListDataSize;
 
@@ -53,8 +59,10 @@ public class TimeseriesMetadata implements Accountable {
   public TimeseriesMetadata() {
   }
 
-  public TimeseriesMetadata(long startOffsetOfChunkMetaDataList, int chunkMetaDataListDataSize,
-      String measurementId, TSDataType dataType, Statistics statistics) {
+  public TimeseriesMetadata(byte timeSeriesMetadataType, long startOffsetOfChunkMetaDataList,
+      int chunkMetaDataListDataSize, String measurementId, TSDataType dataType,
+      Statistics statistics) {
+    this.timeSeriesMetadataType = timeSeriesMetadataType;
     this.startOffsetOfChunkMetaDataList = startOffsetOfChunkMetaDataList;
     this.chunkMetaDataListDataSize = chunkMetaDataListDataSize;
     this.measurementId = measurementId;
@@ -63,6 +71,7 @@ public class TimeseriesMetadata implements Accountable {
   }
 
   public TimeseriesMetadata(TimeseriesMetadata timeseriesMetadata) {
+    this.timeSeriesMetadataType = timeseriesMetadata.timeSeriesMetadataType;
     this.startOffsetOfChunkMetaDataList = timeseriesMetadata.startOffsetOfChunkMetaDataList;
     this.chunkMetaDataListDataSize = timeseriesMetadata.chunkMetaDataListDataSize;
     this.measurementId = timeseriesMetadata.measurementId;
@@ -73,6 +82,7 @@ public class TimeseriesMetadata implements Accountable {
 
   public static TimeseriesMetadata deserializeFrom(ByteBuffer buffer) {
     TimeseriesMetadata timeseriesMetaData = new TimeseriesMetadata();
+    timeseriesMetaData.setTimeSeriesMetadataType(ReadWriteIOUtils.readByte(buffer));
     timeseriesMetaData.setMeasurementId(ReadWriteIOUtils.readString(buffer));
     timeseriesMetaData.setTSDataType(ReadWriteIOUtils.readDataType(buffer));
     timeseriesMetaData.setOffsetOfChunkMetaDataList(ReadWriteIOUtils.readLong(buffer));
@@ -91,6 +101,7 @@ public class TimeseriesMetadata implements Accountable {
    */
   public int serializeTo(OutputStream outputStream) throws IOException {
     int byteLen = 0;
+    byteLen += ReadWriteIOUtils.write(timeSeriesMetadataType, outputStream);
     byteLen += ReadWriteIOUtils.write(measurementId, outputStream);
     byteLen += ReadWriteIOUtils.write(dataType, outputStream);
     byteLen += ReadWriteIOUtils.write(startOffsetOfChunkMetaDataList, outputStream);
@@ -100,6 +111,14 @@ public class TimeseriesMetadata implements Accountable {
     return byteLen;
   }
 
+  public byte getTimeSeriesMetadataType() {
+    return timeSeriesMetadataType;
+  }
+
+  public void setTimeSeriesMetadataType(byte timeSeriesMetadataType) {
+    this.timeSeriesMetadataType = timeSeriesMetadataType;
+  }
+
   public long getOffsetOfChunkMetaDataList() {
     return startOffsetOfChunkMetaDataList;
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSEncoding.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSEncoding.java
index 9232fb9..0d2a5ea 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSEncoding.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/enums/TSEncoding.java
@@ -54,42 +54,6 @@ public enum TSEncoding {
     }
   }
 
-  private static TSEncoding getTsEncoding(byte encoding) {
-    if (encoding < 0 || 8 < encoding) {
-      throw new IllegalArgumentException("Invalid input: " + encoding);
-    }
-    switch (encoding) {
-      case 1:
-        return PLAIN_DICTIONARY;
-      case 2:
-        return RLE;
-      case 3:
-        return DIFF;
-      case 4:
-        return TS_2DIFF;
-      case 5:
-        return BITMAP;
-      case 6:
-        return GORILLA_V1;
-      case 7:
-        return REGULAR;
-      case 8:
-        return GORILLA;
-      default:
-        return PLAIN;
-    }
-  }
-
-  /**
-   * give an byte to return a encoding type.
-   *
-   * @param encoding byte number
-   * @return encoding type
-   */
-  public static TSEncoding byteToEnum(byte encoding) {
-    return getTsEncoding(encoding);
-  }
-
   public static int getSerializedSize() {
     return Byte.BYTES;
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index 16e2b71..fd3e50a 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -39,7 +39,7 @@ import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.compress.IUnCompressor;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
+import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkGroupMetadata;
@@ -513,8 +513,7 @@ public class TsFileSequenceReader implements AutoCloseable {
         curSize += timeseriesMetadataMap.get(index).getDataSizeOfChunkMetaDataList();
       }
       ChunkMetadata chunkMetadata = ChunkMetadata
-          .deserializeFrom(buffer, timeseriesMetadataMap.get(index).getMeasurementId(),
-              timeseriesMetadataMap.get(index).getTSDataType());
+          .deserializeFrom(buffer, timeseriesMetadataMap.get(index));
       seriesMetadata.computeIfAbsent(chunkMetadata.getMeasurementUid(), key -> new ArrayList<>())
           .add(chunkMetadata);
     }
@@ -1041,9 +1040,7 @@ public class TsFileSequenceReader implements AutoCloseable {
 
     ByteBuffer buffer = readData(startOffsetOfChunkMetadataList, dataSizeOfChunkMetadataList);
     while (buffer.hasRemaining()) {
-      chunkMetadataList.add(ChunkMetadata
-          .deserializeFrom(buffer, timeseriesMetaData.getMeasurementId(),
-              timeseriesMetaData.getTSDataType()));
+      chunkMetadataList.add(ChunkMetadata.deserializeFrom(buffer, timeseriesMetaData));
     }
 
     VersionUtils.applyVersion(chunkMetadataList, versionInfo);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
index 4ec4a23..18127d2 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/schema/MeasurementSchema.java
@@ -174,7 +174,7 @@ public class MeasurementSchema implements Comparable<MeasurementSchema>, Seriali
   }
 
   public TSEncoding getEncodingType() {
-    return TSEncoding.byteToEnum(encoding);
+    return TSEncoding.deserialize(encoding);
   }
 
   public TSDataType getType() {
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index 3c83308..c7eefe0 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -30,7 +30,7 @@ import java.util.TreeMap;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
+import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkGroupMetadata;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
@@ -84,7 +84,7 @@ public class TsFileIOWriter {
   private long markedPosition;
   private String currentChunkGroupDeviceId;
   protected List<Pair<Long, Long>> versionInfo = new ArrayList<>();
-  
+
   // for upgrade tool
   Map<String, List<TimeseriesMetadata>> deviceTimeseriesMetadataMap;
 
@@ -153,7 +153,8 @@ public class TsFileIOWriter {
     if (currentChunkGroupDeviceId == null || chunkMetadataList.isEmpty()) {
       return;
     }
-    chunkGroupMetadataList.add(new ChunkGroupMetadata(currentChunkGroupDeviceId, chunkMetadataList));
+    chunkGroupMetadataList
+        .add(new ChunkGroupMetadata(currentChunkGroupDeviceId, chunkMetadataList));
     currentChunkGroupDeviceId = null;
     chunkMetadataList = null;
     out.flush();
@@ -162,11 +163,11 @@ public class TsFileIOWriter {
   /**
    * start a {@linkplain ChunkMetadata ChunkMetaData}.
    *
-   * @param measurementSchema - schema of this time series
+   * @param measurementSchema    - schema of this time series
    * @param compressionCodecName - compression name of this time series
-   * @param tsDataType - data type
-   * @param statistics - Chunk statistics
-   * @param dataSize - the serialized size of all pages
+   * @param tsDataType           - data type
+   * @param statistics           - Chunk statistics
+   * @param dataSize             - the serialized size of all pages
    * @throws IOException if I/O error occurs
    */
   public void startFlushChunk(MeasurementSchema measurementSchema,
@@ -285,15 +286,18 @@ public class TsFileIOWriter {
       Statistics seriesStatistics = Statistics.getStatsByType(dataType);
 
       int chunkMetadataListLength = 0;
+      boolean serializeStatistic = (entry.getValue().size() > 1);
       // flush chunkMetadataList one by one
       for (ChunkMetadata chunkMetadata : entry.getValue()) {
         if (!chunkMetadata.getDataType().equals(dataType)) {
           continue;
         }
-        chunkMetadataListLength += chunkMetadata.serializeTo(out.wrapAsStream());
+        chunkMetadataListLength += chunkMetadata
+            .serializeTo(out.wrapAsStream(), serializeStatistic);
         seriesStatistics.mergeStatistics(chunkMetadata.getStatistics());
       }
-      TimeseriesMetadata timeseriesMetadata = new TimeseriesMetadata(offsetOfChunkMetadataList,
+      TimeseriesMetadata timeseriesMetadata = new TimeseriesMetadata(
+          serializeStatistic ? (byte) 1 : (byte) 0, offsetOfChunkMetadataList,
           chunkMetadataListLength, path.getMeasurement(), dataType, seriesStatistics);
       deviceTimeseriesMetadataMap.computeIfAbsent(device, k -> new ArrayList<>())
           .add(timeseriesMetadata);
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
index 97a7a33..bb86168 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/read/TsFileSequenceReaderTest.java
@@ -30,7 +30,7 @@ import java.util.Set;
 import java.util.stream.Collectors;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
+import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.header.PageHeader;
 import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
@@ -64,8 +64,7 @@ public class TsFileSequenceReaderTest {
   @Test
   public void testReadTsFileSequently() throws IOException {
     TsFileSequenceReader reader = new TsFileSequenceReader(FILE_PATH);
-    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
-        .getBytes().length);
+    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + 1);
     Map<String, List<Pair<Long, Long>>> deviceChunkGroupMetadataOffsets = new HashMap<>();
 
     long startOffset = reader.position();
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
index a5bb8e6..61e4786 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/write/TsFileIOWriterTest.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.constant.TestConstant;
 import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.footer.ChunkGroupHeader;
+import org.apache.iotdb.tsfile.file.footer.ChunkGroupFooter;
 import org.apache.iotdb.tsfile.file.header.ChunkHeader;
 import org.apache.iotdb.tsfile.file.metadata.TimeSeriesMetadataTest;
 import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
@@ -84,21 +84,20 @@ public class TsFileIOWriterTest {
 
     // magic_string
     Assert.assertEquals(TSFileConfig.MAGIC_STRING, reader.readHeadMagic());
-    Assert.assertEquals(TSFileConfig.VERSION_NUMBER_V2, reader.readVersionNumber());
+    Assert.assertEquals(TSFileConfig.VERSION_NUMBER, reader.readVersionNumber());
     Assert.assertEquals(TSFileConfig.MAGIC_STRING, reader.readTailMagic());
 
-    // chunk group header
-    Assert.assertEquals(MetaMarker.CHUNK_GROUP_HEADER, reader.readMarker());
-    ChunkGroupHeader chunkGroupHeader = reader.readChunkGroupFooter();
-    Assert.assertEquals(deviceId, chunkGroupHeader.getDeviceID());
-
     // chunk header
-    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER_V2
+    reader.position(TSFileConfig.MAGIC_STRING.getBytes().length + TSFileConfig.VERSION_NUMBER
         .getBytes().length);
-    Assert.assertEquals(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER, reader.readMarker());
-    ChunkHeader header = reader.readChunkHeader(MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER);
+    Assert.assertEquals(MetaMarker.CHUNK_HEADER, reader.readMarker());
+    ChunkHeader header = reader.readChunkHeader();
     Assert.assertEquals(TimeSeriesMetadataTest.measurementUID, header.getMeasurementID());
 
+    // chunk group footer
+    Assert.assertEquals(MetaMarker.CHUNK_GROUP_FOOTER, reader.readMarker());
+    ChunkGroupFooter footer = reader.readChunkGroupFooter();
+    Assert.assertEquals(deviceId, footer.getDeviceID());
 
     // separator
     Assert.assertEquals(MetaMarker.VERSION, reader.readMarker());