Posted to commits@iotdb.apache.org by su...@apache.org on 2021/11/18 01:52:22 UTC

[iotdb] branch experimental/index updated (d991bfd -> 8cf5af7)

This is an automated email from the ASF dual-hosted git repository.

sunzesong pushed a change to branch experimental/index
in repository https://gitbox.apache.org/repos/asf/iotdb.git.


    from d991bfd  Experiment 1831
     new a92ee19  sepersate MetadataIndexTree
     new 0969f20  separate BloomFilter
     new 8cf5af7  fix bugs

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 example/tsfile/pom.xml                             |   2 +-
 .../{test1831 => test1835}/TsFileAggregation.java  |  40 +--
 .../TsFileAggregationV2.java}                      |  46 ++--
 .../{test1831 => test1835}/TsFileRawRead.java      |  43 +--
 .../TsFileRawReadV2.java}                          |  47 ++--
 .../{test1832 => test1835}/TsFileSketchTool.java   |   8 +-
 .../TsFileWrite.java}                              |  69 +++--
 .../{test1831 => test1835}/TsFileWriteV2.java      |  67 +++--
 .../file/metadata/MetadataIndexConstructor.java    |  42 +--
 .../{TsFileMetadata.java => TsFileMetadataV2.java} |  22 +-
 .../fileSystem/fsFactory/LocalFSFactory.java       |  20 +-
 .../iotdb/tsfile/read/TsFileSequenceReader.java    | 300 +++++++++++++++++++--
 .../tsfile/read/controller/IMetadataQuerier.java   |   2 +
 .../read/controller/MetadataQuerierByFileImpl.java |  68 ++++-
 .../tsfile/read/query/executor/TsFileExecutor.java |   3 +-
 .../apache/iotdb/tsfile/write/TsFileWriter.java    |   4 +-
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java  | 140 ++++++++--
 .../iotdb/tsfile/utils/FilePathUtilsTest.java      |   4 +-
 18 files changed, 706 insertions(+), 221 deletions(-)
 copy example/tsfile/src/main/java/org/apache/iotdb/tsfile/{test1831 => test1835}/TsFileAggregation.java (63%)
 copy example/tsfile/src/main/java/org/apache/iotdb/tsfile/{test1831/TsFileAggregation.java => test1835/TsFileAggregationV2.java} (61%)
 copy example/tsfile/src/main/java/org/apache/iotdb/tsfile/{test1831 => test1835}/TsFileRawRead.java (67%)
 copy example/tsfile/src/main/java/org/apache/iotdb/tsfile/{test1831/TsFileRawRead.java => test1835/TsFileRawReadV2.java} (65%)
 rename example/tsfile/src/main/java/org/apache/iotdb/tsfile/{test1832 => test1835}/TsFileSketchTool.java (98%)
 copy example/tsfile/src/main/java/org/apache/iotdb/tsfile/{test1831/TsFileWriteV2.java => test1835/TsFileWrite.java} (59%)
 copy example/tsfile/src/main/java/org/apache/iotdb/tsfile/{test1831 => test1835}/TsFileWriteV2.java (59%)
 copy tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/{TsFileMetadata.java => TsFileMetadataV2.java} (89%)
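The diffs below move the MetadataIndex tree and the BloomFilter out of the tail of each TsFile and into a companion "<name>.tsfile.index" file. A minimal sketch of how a reader can detect that companion file (the path and class name are illustrative only; TsFileSequenceReader does the equivalent with FSFactoryProducer, as shown in the diffs further down):

    import java.io.File;

    public class SidecarIndexCheck {
      public static void main(String[] args) {
        File tsFile = new File("/path/to/test0.tsfile");         // illustrative path
        File indexFile = new File(tsFile.getPath() + ".index");  // companion written by TsFileIOWriter
        // The new read path only opens the companion input when it exists.
        System.out.println("separated metadata index present: " + indexFile.exists());
      }
    }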

[iotdb] 02/03: separate BloomFilter

Posted by su...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sunzesong pushed a commit to branch experimental/index
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 0969f20c84d353b40b46fa8b3510b301391b26f1
Author: samperson1997 <sz...@mails.tsinghua.edu.cn>
AuthorDate: Fri Nov 12 15:49:57 2021 +0800

    separate BloomFilter
---
 .../iotdb/tsfile/test1835/TsFileAggregation.java   |  77 ++++++++
 .../iotdb/tsfile/test1835/TsFileAggregationV2.java |  77 ++++++++
 .../iotdb/tsfile/test1835/TsFileRawRead.java       |  88 +++++++++
 .../iotdb/tsfile/test1835/TsFileRawReadV2.java     |  88 +++++++++
 .../iotdb/tsfile/test1835/TsFileSketchToolV2.java  |  96 ++--------
 .../tsfile/file/metadata/TsFileMetadataV2.java     |  32 ++--
 .../iotdb/tsfile/read/TsFileSequenceReader.java    | 211 ++++++++++++++++++++-
 .../tsfile/read/controller/IMetadataQuerier.java   |   2 +
 .../read/controller/MetadataQuerierByFileImpl.java |  68 ++++++-
 .../tsfile/read/query/executor/TsFileExecutor.java |   6 +-
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java  |  30 +--
 11 files changed, 664 insertions(+), 111 deletions(-)

diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregation.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregation.java
new file mode 100644
index 0000000..b4de97d
--- /dev/null
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregation.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iotdb.tsfile.test1835;
+
+import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
+import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
+import org.apache.iotdb.tsfile.read.common.Path;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
+
+import java.io.IOException;
+
+public class TsFileAggregation {
+
+  private static final String DEVICE1 = "device_";
+  public static int chunkNum;
+  public static int deviceNum = 1;
+  public static int sensorNum = 1;
+  public static int fileNum = 1;
+
+  public static void main(String[] args) throws IOException {
+    long costTime = 0L;
+    Options opts = new Options();
+    //    Option chunkNumOption =
+    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
+    //    opts.addOption(chunkNumOption);
+
+    BasicParser parser = new BasicParser();
+    CommandLine cl;
+    try {
+      cl = parser.parse(opts, args);
+      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    long totalStartTime = System.nanoTime();
+    for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
+      // file path
+      String path =
+          "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+              + deviceNum
+              + "/test1.tsfile";
+
+      // aggregation query
+      try (TsFileSequenceReader reader = new TsFileSequenceReader(path)) {
+        Path seriesPath = new Path(DEVICE1, "sensor_1");
+        long startTime = System.nanoTime();
+        TimeseriesMetadata timeseriesMetadata = reader.readTimeseriesMetadata(seriesPath, false);
+        long count = timeseriesMetadata.getStatistics().getCount();
+        costTime += (System.nanoTime() - startTime);
+        System.out.println(count);
+      }
+    }
+    System.out.println(
+        "Total raw read cost time: " + (System.nanoTime() - totalStartTime) / 1000_000 + "ms");
+    System.out.println("Index area cost time: " + costTime / 1000_000 + "ms");
+  }
+}
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregationV2.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregationV2.java
new file mode 100644
index 0000000..8681849
--- /dev/null
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregationV2.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iotdb.tsfile.test1835;
+
+import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
+import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
+import org.apache.iotdb.tsfile.read.common.Path;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
+
+import java.io.IOException;
+
+public class TsFileAggregationV2 {
+
+  private static final String DEVICE1 = "device_";
+  public static int chunkNum;
+  public static int deviceNum = 1;
+  public static int sensorNum = 1;
+  public static int fileNum = 1;
+
+  public static void main(String[] args) throws IOException {
+    long costTime = 0L;
+    Options opts = new Options();
+    //    Option chunkNumOption =
+    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
+    //    opts.addOption(chunkNumOption);
+
+    BasicParser parser = new BasicParser();
+    CommandLine cl;
+    try {
+      cl = parser.parse(opts, args);
+      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    long totalStartTime = System.nanoTime();
+    for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
+      // file path
+      String path =
+          "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+              + deviceNum
+              + "/test0.tsfile";
+
+      // aggregation query
+      try (TsFileSequenceReader reader = new TsFileSequenceReader(path)) {
+        Path seriesPath = new Path(DEVICE1, "sensor_1");
+        long startTime = System.nanoTime();
+        TimeseriesMetadata timeseriesMetadata = reader.readTimeseriesMetadataV4(seriesPath, false);
+        long count = timeseriesMetadata.getStatistics().getCount();
+        costTime += (System.nanoTime() - startTime);
+        System.out.println(count);
+      }
+    }
+    System.out.println(
+        "Total raw read cost time: " + (System.nanoTime() - totalStartTime) / 1000_000 + "ms");
+    System.out.println("Index area cost time: " + costTime / 1000_000 + "ms");
+  }
+}
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawRead.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawRead.java
new file mode 100644
index 0000000..11d54b2
--- /dev/null
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawRead.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iotdb.tsfile.test1835;
+
+import org.apache.iotdb.tsfile.read.ReadOnlyTsFile;
+import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
+import org.apache.iotdb.tsfile.read.common.Path;
+import org.apache.iotdb.tsfile.read.expression.QueryExpression;
+import org.apache.iotdb.tsfile.read.query.dataset.QueryDataSet;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+public class TsFileRawRead {
+
+  private static final String DEVICE1 = "device_";
+  public static int chunkNum;
+  public static int deviceNum = 1;
+  public static int sensorNum = 1;
+  public static int fileNum = 1;
+
+  public static void main(String[] args) throws IOException {
+    long costTime = 0L;
+    Options opts = new Options();
+    //    Option chunkNumOption =
+    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
+    //    opts.addOption(chunkNumOption);
+
+    BasicParser parser = new BasicParser();
+    CommandLine cl;
+    try {
+      cl = parser.parse(opts, args);
+      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    long totalStartTime = System.nanoTime();
+    for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
+      // file path
+      String path =
+          "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+              + deviceNum
+              + "/test1.tsfile";
+
+      // raw data query
+      try (TsFileSequenceReader reader = new TsFileSequenceReader(path);
+          ReadOnlyTsFile readTsFile = new ReadOnlyTsFile(reader)) {
+
+        ArrayList<Path> paths = new ArrayList<>();
+        paths.add(new Path(DEVICE1, "sensor_1"));
+
+        QueryExpression queryExpression = QueryExpression.create(paths, null);
+
+        long startTime = System.nanoTime();
+        QueryDataSet queryDataSet = readTsFile.query(queryExpression);
+        while (queryDataSet.hasNext()) {
+          System.out.println(queryDataSet.next());
+        }
+
+        costTime += (System.nanoTime() - startTime);
+      }
+    }
+    System.out.println(
+        "Total raw read cost time: " + (System.nanoTime() - totalStartTime) / 1000_000 + "ms");
+    System.out.println("Index area cost time: " + costTime / 1000_000 + "ms");
+  }
+}
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawReadV2.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawReadV2.java
new file mode 100644
index 0000000..2f898dc
--- /dev/null
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawReadV2.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.iotdb.tsfile.test1835;
+
+import org.apache.iotdb.tsfile.read.ReadOnlyTsFile;
+import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
+import org.apache.iotdb.tsfile.read.common.Path;
+import org.apache.iotdb.tsfile.read.expression.QueryExpression;
+import org.apache.iotdb.tsfile.read.query.dataset.QueryDataSet;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+public class TsFileRawReadV2 {
+
+  private static final String DEVICE1 = "device_";
+  public static int chunkNum;
+  public static int deviceNum = 1;
+  public static int sensorNum = 1;
+  public static int fileNum = 1;
+
+  public static void main(String[] args) throws IOException {
+    long costTime = 0L;
+    long totalStartTime = System.nanoTime();
+    Options opts = new Options();
+    //    Option chunkNumOption =
+    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
+    //    opts.addOption(chunkNumOption);
+
+    BasicParser parser = new BasicParser();
+    CommandLine cl;
+    try {
+      cl = parser.parse(opts, args);
+      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
+      // file path
+      String path =
+          "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+              + deviceNum
+              + "/test0.tsfile";
+
+      // raw data query
+      try (TsFileSequenceReader reader = new TsFileSequenceReader(path);
+          ReadOnlyTsFile readTsFile = new ReadOnlyTsFile(reader)) {
+
+        ArrayList<Path> paths = new ArrayList<>();
+        paths.add(new Path(DEVICE1, "sensor_1"));
+
+        QueryExpression queryExpression = QueryExpression.create(paths, null);
+
+        long startTime = System.nanoTime();
+        QueryDataSet queryDataSet = readTsFile.query(queryExpression);
+        while (queryDataSet.hasNext()) {
+          System.out.println(queryDataSet.next());
+        }
+
+        costTime += (System.nanoTime() - startTime);
+      }
+    }
+    System.out.println(
+        "Total raw read cost time: " + (System.nanoTime() - totalStartTime) / 1000_000 + "ms");
+    System.out.println("Index area cost time: " + costTime / 1000_000 + "ms");
+  }
+}
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java
index 869435f..c36a085 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java
@@ -29,16 +29,14 @@ import org.apache.iotdb.tsfile.file.metadata.IChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.MetadataIndexEntry;
 import org.apache.iotdb.tsfile.file.metadata.MetadataIndexNode;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
-import org.apache.iotdb.tsfile.file.metadata.TsFileMetadataV2;
+import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
 import org.apache.iotdb.tsfile.file.metadata.enums.MetadataIndexNodeType;
 import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
 import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
 import org.apache.iotdb.tsfile.read.common.Chunk;
 import org.apache.iotdb.tsfile.read.common.Path;
-import org.apache.iotdb.tsfile.read.reader.TsFileInput;
 import org.apache.iotdb.tsfile.utils.BloomFilter;
 import org.apache.iotdb.tsfile.utils.Pair;
-import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 import java.io.FileWriter;
 import java.io.IOException;
@@ -100,7 +98,7 @@ public class TsFileSketchToolV2 {
     printlnBoth(pw, "file length: " + length);
 
     // get metadata information
-    TsFileMetadataV2 tsFileMetaData = reader.readFileMetadataV2();
+    TsFileMetadata tsFileMetaData = reader.readFileMetadataV2();
     List<ChunkGroupMetadata> allChunkGroupMetadata = new ArrayList<>();
     reader.selfCheck(null, allChunkGroupMetadata, false);
 
@@ -111,13 +109,15 @@ public class TsFileSketchToolV2 {
     printChunk(allChunkGroupMetadata);
 
     // metadata begins
-    printlnBoth(pw, String.format("%20s", tsFileMetaData.getMetaOffset()) + "|\t[marker] 2");
-
-    //    System.out.println(reader.getFileMetadataPos());
-
+    if (tsFileMetaData.getMetadataIndex().getChildren().isEmpty()) {
+      printlnBoth(pw, String.format("%20s", reader.getFileMetadataPos() - 1) + "|\t[marker] 2");
+    } else {
+      printlnBoth(
+          pw, String.format("%20s", reader.readFileMetadata().getMetaOffset()) + "|\t[marker] 2");
+    }
     // get all timeseries index
     Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap =
-        reader.getAllTimeseriesMetadataWithOffset(reader.position(), reader.getFileMetadataPos());
+        reader.getAllTimeseriesMetadataWithOffset();
 
     // print timeseries index
     printTimeseriesIndex(timeseriesMetadataMap);
@@ -130,7 +130,7 @@ public class TsFileSketchToolV2 {
         pw,
         "---------------------------- IndexOfTimerseriesIndex Tree -----------------------------");
     // print index tree
-    MetadataIndexNode metadataIndexNode = readMetadataIndex();
+    MetadataIndexNode metadataIndexNode = tsFileMetaData.getMetadataIndex();
     TreeMap<Long, MetadataIndexNode> metadataIndexNodeMap = new TreeMap<>();
     List<String> treeOutputStringBuffer = new ArrayList<>();
     loadIndexTree(metadataIndexNode, metadataIndexNodeMap, treeOutputStringBuffer, 0);
@@ -147,7 +147,7 @@ public class TsFileSketchToolV2 {
     pw.close();
   }
 
-  private void printTsFileMetadata(TsFileMetadataV2 tsFileMetaData) {
+  private void printTsFileMetadata(TsFileMetadata tsFileMetaData) {
     try {
       printlnBoth(pw, String.format("%20s", reader.getFileMetadataPos()) + "|\t[TsFileMetadata]");
       printlnBoth(
@@ -401,83 +401,17 @@ public class TsFileSketchToolV2 {
     pw.println(str);
   }
 
-  private MetadataIndexNode readMetadataIndex() throws IOException {
-    TsFileInput tsFileInput = FSFactoryProducer.getFileInputFactory().getTsFileInput(indexFileName);
-    long totalSize = tsFileInput.size();
-    ByteBuffer lastNodeSizeBuffer = ByteBuffer.allocate(Integer.BYTES);
-    tsFileInput.read(lastNodeSizeBuffer, totalSize - Integer.BYTES);
-    lastNodeSizeBuffer.flip();
-
-    int lastNodeSize = ReadWriteIOUtils.readInt(lastNodeSizeBuffer);
-    ByteBuffer lastNode = ByteBuffer.allocate(lastNodeSize);
-    tsFileInput.read(lastNode, totalSize - lastNodeSize - Integer.BYTES);
-    lastNode.flip();
-    return MetadataIndexNode.deserializeFrom(lastNode);
-  }
-
   private class TsFileSketchToolReader extends TsFileSequenceReader {
     public TsFileSketchToolReader(String file) throws IOException {
       super(file);
     }
-    /**
-     * Traverse the metadata index from MetadataIndexEntry to get TimeseriesMetadatas
-     *
-     * @param metadataIndex MetadataIndexEntry
-     * @param buffer byte buffer
-     * @param deviceId String
-     * @param timeseriesMetadataMap map: deviceId -> timeseriesMetadata list
-     * @param needChunkMetadata deserialize chunk metadata list or not
-     */
-    private void generateMetadataIndexWithOffset(
-        long startOffset,
-        MetadataIndexEntry metadataIndex,
-        ByteBuffer buffer,
-        String deviceId,
-        MetadataIndexNodeType type,
-        Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap,
-        boolean needChunkMetadata)
-        throws IOException {
-      if (type.equals(MetadataIndexNodeType.LEAF_MEASUREMENT)) {
-        while (buffer.hasRemaining()) {
-          long pos = startOffset + buffer.position();
-          TimeseriesMetadata timeseriesMetadata =
-              TimeseriesMetadata.deserializeFrom(buffer, needChunkMetadata);
-          timeseriesMetadataMap.put(
-              pos,
-              new Pair<>(
-                  new Path(deviceId, timeseriesMetadata.getMeasurementId()), timeseriesMetadata));
-        }
-      } else {
-        // deviceId should be determined by LEAF_DEVICE node
-        if (type.equals(MetadataIndexNodeType.LEAF_DEVICE)) {
-          deviceId = metadataIndex.getName();
-        }
-        MetadataIndexNode metadataIndexNode = MetadataIndexNode.deserializeFrom(buffer);
-        int metadataIndexListSize = metadataIndexNode.getChildren().size();
-        for (int i = 0; i < metadataIndexListSize; i++) {
-          long endOffset = metadataIndexNode.getEndOffset();
-          if (i != metadataIndexListSize - 1) {
-            endOffset = metadataIndexNode.getChildren().get(i + 1).getOffset();
-          }
-          ByteBuffer nextBuffer =
-              readData(metadataIndexNode.getChildren().get(i).getOffset(), endOffset);
-          generateMetadataIndexWithOffset(
-              metadataIndexNode.getChildren().get(i).getOffset(),
-              metadataIndexNode.getChildren().get(i),
-              nextBuffer,
-              deviceId,
-              metadataIndexNode.getNodeType(),
-              timeseriesMetadataMap,
-              needChunkMetadata);
-        }
-      }
-    }
 
-    public Map<Long, Pair<Path, TimeseriesMetadata>> getAllTimeseriesMetadataWithOffset(
-        long startOffset, long endOffset) throws IOException {
+    public Map<Long, Pair<Path, TimeseriesMetadata>> getAllTimeseriesMetadataWithOffset()
+        throws IOException {
       Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap = new TreeMap<>();
 
-      ByteBuffer buffer = readData(startOffset, endOffset);
+      // FIXME
+      ByteBuffer buffer = readData(0, 0);
       while (buffer.hasRemaining()) {
         int bufferPos = buffer.position();
         TimeseriesMetadata timeseriesMetaData = TimeseriesMetadata.deserializeFrom(buffer, false);
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadataV2.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadataV2.java
index 64246b7..f3f7129 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadataV2.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadataV2.java
@@ -31,13 +31,13 @@ import java.nio.ByteBuffer;
 import java.util.Set;
 
 /** TSFileMetaData collects all metadata info and saves in its data structure. */
-public class TsFileMetadataV2 {
+public class TsFileMetadataV2 extends TsFileMetadata {
 
   // bloom filter
   private BloomFilter bloomFilter;
 
-  // offset of MetaMarker.SEPARATOR
-  private long metaOffset;
+  // List of <name, offset, childMetadataIndexType>
+  private MetadataIndexNode metadataIndex;
 
   /**
    * deserialize data from the buffer.
@@ -45,12 +45,11 @@ public class TsFileMetadataV2 {
    * @param buffer -buffer use to deserialize
    * @return -a instance of TsFileMetaData
    */
-  public static TsFileMetadataV2 deserializeFrom(ByteBuffer buffer) {
+  public static TsFileMetadata deserializeFrom(ByteBuffer buffer) {
     TsFileMetadataV2 fileMetaData = new TsFileMetadataV2();
 
-    // metaOffset
-    long metaOffset = ReadWriteIOUtils.readLong(buffer);
-    fileMetaData.setMetaOffset(metaOffset);
+    // metadataIndex
+    fileMetaData.metadataIndex = MetadataIndexNode.deserializeFrom(buffer);
 
     // read bloom filter
     if (buffer.hasRemaining()) {
@@ -78,7 +77,16 @@ public class TsFileMetadataV2 {
    * @return -byte length
    */
   public int serializeTo(OutputStream outputStream) throws IOException {
-    return ReadWriteIOUtils.write(metaOffset, outputStream);
+    int byteLen = 0;
+
+    // metadataIndex
+    if (metadataIndex != null) {
+      byteLen += metadataIndex.serializeTo(outputStream);
+    } else {
+      byteLen += ReadWriteIOUtils.write(0, outputStream);
+    }
+
+    return byteLen;
   }
 
   /**
@@ -116,11 +124,11 @@ public class TsFileMetadataV2 {
     return filter;
   }
 
-  public long getMetaOffset() {
-    return metaOffset;
+  public MetadataIndexNode getMetadataIndex() {
+    return metadataIndex;
   }
 
-  public void setMetaOffset(long metaOffset) {
-    this.metaOffset = metaOffset;
+  public void setMetadataIndex(MetadataIndexNode metadataIndex) {
+    this.metadataIndex = metadataIndex;
   }
 }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index 565d677..bf5856d 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -90,6 +90,7 @@ public class TsFileSequenceReader implements AutoCloseable {
       "Something error happened while deserializing MetadataIndexNode of file {}";
   protected String file;
   protected TsFileInput tsFileInput;
+  protected TsFileInput metadataIndexInput;
   protected long fileMetadataPos;
   protected int fileMetadataSize;
   private ByteBuffer markerBuffer = ByteBuffer.allocate(Byte.BYTES);
@@ -127,6 +128,9 @@ public class TsFileSequenceReader implements AutoCloseable {
     }
     this.file = file;
     tsFileInput = FSFactoryProducer.getFileInputFactory().getTsFileInput(file);
+    if (FSFactoryProducer.getFSFactory().getFile(file + ".index").exists()) {
+      metadataIndexInput = FSFactoryProducer.getFileInputFactory().getTsFileInput(file + ".index");
+    }
     try {
       if (loadMetadataSize) {
         loadMetadataSize();
@@ -271,13 +275,27 @@ public class TsFileSequenceReader implements AutoCloseable {
     return tsFileMetaData;
   }
 
-  public TsFileMetadataV2 readFileMetadataV2() throws IOException {
+  public TsFileMetadata readFileMetadataV2() throws IOException {
     try {
-      return TsFileMetadataV2.deserializeFrom(readData(fileMetadataPos, fileMetadataSize));
+      if (tsFileMetaData == null) {
+        long totalSize = metadataIndexInput.size();
+        ByteBuffer rootNodeOffsetBuffer = ByteBuffer.allocate(Long.BYTES);
+        metadataIndexInput.read(rootNodeOffsetBuffer, totalSize - Long.BYTES);
+        rootNodeOffsetBuffer.flip();
+
+        long rootNodeOffset = ReadWriteIOUtils.readLong(rootNodeOffsetBuffer);
+        tsFileMetaData =
+            TsFileMetadataV2.deserializeFrom(
+                readData(
+                    rootNodeOffset,
+                    FSFactoryProducer.getFSFactory().getFile(this.file + ".index").length(),
+                    metadataIndexInput));
+      }
     } catch (BufferOverflowException e) {
       logger.error("Something error happened while reading file metadata of file {}", file);
       throw e;
     }
+    return tsFileMetaData;
   }
 
   /**
@@ -446,6 +464,51 @@ public class TsFileSequenceReader implements AutoCloseable {
     return searchResult >= 0 ? timeseriesMetadataList.get(searchResult) : null;
   }
 
+  public TimeseriesMetadata readTimeseriesMetadataV4(Path path, boolean ignoreNotExists)
+      throws IOException {
+    readFileMetadataV2();
+    MetadataIndexNode deviceMetadataIndexNode = tsFileMetaData.getMetadataIndex();
+    Pair<MetadataIndexEntry, Long> metadataIndexPair =
+        getMetadataAndEndOffset(deviceMetadataIndexNode, path.getDevice(), true, true);
+    if (metadataIndexPair == null) {
+      if (ignoreNotExists) {
+        return null;
+      }
+      throw new IOException("Device {" + path.getDevice() + "} is not in tsFileMetaData");
+    }
+    ByteBuffer buffer =
+        readData(metadataIndexPair.left.getOffset(), metadataIndexPair.right, metadataIndexInput);
+    MetadataIndexNode metadataIndexNode = deviceMetadataIndexNode;
+    if (!metadataIndexNode.getNodeType().equals(MetadataIndexNodeType.LEAF_MEASUREMENT)) {
+      try {
+        metadataIndexNode = MetadataIndexNode.deserializeFrom(buffer);
+      } catch (BufferOverflowException e) {
+        logger.error(METADATA_INDEX_NODE_DESERIALIZE_ERROR, file);
+        throw e;
+      }
+      metadataIndexPair =
+          getMetadataAndEndOffset(metadataIndexNode, path.getMeasurement(), false, false);
+    }
+    if (metadataIndexPair == null) {
+      return null;
+    }
+    List<TimeseriesMetadata> timeseriesMetadataList = new ArrayList<>();
+    buffer = readData(metadataIndexPair.left.getOffset(), metadataIndexPair.right);
+    while (buffer.hasRemaining()) {
+      try {
+        timeseriesMetadataList.add(TimeseriesMetadata.deserializeFrom(buffer, true));
+      } catch (BufferOverflowException e) {
+        logger.error(
+            "Something error happened while deserializing TimeseriesMetadata of file {}", file);
+        throw e;
+      }
+    }
+    // return null if path does not exist in the TsFile
+    int searchResult =
+        binarySearchInTimeseriesMetadataList(timeseriesMetadataList, path.getMeasurement());
+    return searchResult >= 0 ? timeseriesMetadataList.get(searchResult) : null;
+  }
+
   /**
    * Find the leaf node that contains this vector, return all the needed subSensor and time column
    *
@@ -656,6 +719,82 @@ public class TsFileSequenceReader implements AutoCloseable {
     return resultTimeseriesMetadataList;
   }
 
+  public List<TimeseriesMetadata> readTimeseriesMetadataV3(String device, Set<String> measurements)
+      throws IOException {
+    readFileMetadataV2();
+    MetadataIndexNode deviceMetadataIndexNode = tsFileMetaData.getMetadataIndex();
+    Pair<MetadataIndexEntry, Long> metadataIndexPair =
+        getMetadataAndEndOffset(deviceMetadataIndexNode, device, true, false);
+    if (metadataIndexPair == null) {
+      return Collections.emptyList();
+    }
+    List<TimeseriesMetadata> resultTimeseriesMetadataList = new ArrayList<>();
+    List<String> measurementList = new ArrayList<>(measurements);
+    Set<String> measurementsHadFound = new HashSet<>();
+    for (int i = 0; i < measurementList.size(); i++) {
+      if (measurementsHadFound.contains(measurementList.get(i))) {
+        continue;
+      }
+      ByteBuffer buffer =
+          readData(metadataIndexPair.left.getOffset(), metadataIndexPair.right, metadataIndexInput);
+      Pair<MetadataIndexEntry, Long> measurementMetadataIndexPair = metadataIndexPair;
+      List<TimeseriesMetadata> timeseriesMetadataList = new ArrayList<>();
+      MetadataIndexNode metadataIndexNode = deviceMetadataIndexNode;
+      if (!metadataIndexNode.getNodeType().equals(MetadataIndexNodeType.LEAF_MEASUREMENT)) {
+        try {
+          metadataIndexNode = MetadataIndexNode.deserializeFrom(buffer);
+        } catch (BufferOverflowException e) {
+          logger.error(METADATA_INDEX_NODE_DESERIALIZE_ERROR, file);
+          throw e;
+        }
+        measurementMetadataIndexPair =
+            getMetadataAndEndOffset(metadataIndexNode, measurementList.get(i), false, false);
+      }
+      if (measurementMetadataIndexPair == null) {
+        return Collections.emptyList();
+      }
+      buffer =
+          readData(
+              measurementMetadataIndexPair.left.getOffset(), measurementMetadataIndexPair.right);
+      while (buffer.hasRemaining()) {
+        try {
+          timeseriesMetadataList.add(TimeseriesMetadata.deserializeFrom(buffer, true));
+        } catch (BufferOverflowException e) {
+          logger.error(
+              "Something error happened while deserializing TimeseriesMetadata of file {}", file);
+          throw e;
+        }
+      }
+      for (int j = i; j < measurementList.size(); j++) {
+        String current = measurementList.get(j);
+        if (!measurementsHadFound.contains(current)) {
+          int searchResult = binarySearchInTimeseriesMetadataList(timeseriesMetadataList, current);
+          if (searchResult >= 0) {
+            resultTimeseriesMetadataList.add(timeseriesMetadataList.get(searchResult));
+            measurementsHadFound.add(current);
+          }
+        }
+        if (measurementsHadFound.size() == measurements.size()) {
+          return resultTimeseriesMetadataList;
+        }
+      }
+    }
+    return resultTimeseriesMetadataList;
+  }
+
+  public MetadataIndexNode readMetadataIndex() throws IOException {
+    long totalSize = metadataIndexInput.size();
+    ByteBuffer lastNodeSizeBuffer = ByteBuffer.allocate(Integer.BYTES);
+    metadataIndexInput.read(lastNodeSizeBuffer, totalSize - Integer.BYTES);
+    lastNodeSizeBuffer.flip();
+
+    int lastNodeSize = ReadWriteIOUtils.readInt(lastNodeSizeBuffer);
+    ByteBuffer lastNode = ByteBuffer.allocate(lastNodeSize);
+    metadataIndexInput.read(lastNode, totalSize - lastNodeSize - Integer.BYTES);
+    lastNode.flip();
+    return MetadataIndexNode.deserializeFrom(lastNode);
+  }
+
   protected int binarySearchInTimeseriesMetadataList(
       List<TimeseriesMetadata> timeseriesMetadataList, String key) {
     int low = 0;
@@ -705,6 +844,13 @@ public class TsFileSequenceReader implements AutoCloseable {
     return getAllDevices(tsFileMetaData.getMetadataIndex());
   }
 
+  public List<String> getAllDevicesV2() throws IOException {
+    if (tsFileMetaData == null) {
+      readFileMetadataV2();
+    }
+    return getAllDevicesV2(tsFileMetaData.getMetadataIndex());
+  }
+
   private List<String> getAllDevices(MetadataIndexNode metadataIndexNode) throws IOException {
     List<String> deviceList = new ArrayList<>();
     int metadataIndexListSize = metadataIndexNode.getChildren().size();
@@ -739,6 +885,42 @@ public class TsFileSequenceReader implements AutoCloseable {
     return deviceList;
   }
 
+  private List<String> getAllDevicesV2(MetadataIndexNode metadataIndexNode) throws IOException {
+    List<String> deviceList = new ArrayList<>();
+    int metadataIndexListSize = metadataIndexNode.getChildren().size();
+
+    // if metadataIndexNode is LEAF_DEVICE, put all devices in node entry into the list
+    if (metadataIndexNode.getNodeType().equals(MetadataIndexNodeType.LEAF_DEVICE)) {
+      deviceList.addAll(
+          metadataIndexNode.getChildren().stream()
+              .map(MetadataIndexEntry::getName)
+              .collect(Collectors.toList()));
+      return deviceList;
+    }
+
+    for (int i = 0; i < metadataIndexListSize; i++) {
+      long endOffset = metadataIndexNode.getEndOffset();
+      if (i != metadataIndexListSize - 1) {
+        endOffset = metadataIndexNode.getChildren().get(i + 1).getOffset();
+      }
+      ByteBuffer buffer =
+          readData(
+              metadataIndexNode.getChildren().get(i).getOffset(), endOffset, metadataIndexInput);
+      MetadataIndexNode node = MetadataIndexNode.deserializeFrom(buffer);
+      if (node.getNodeType().equals(MetadataIndexNodeType.LEAF_DEVICE)) {
+        // if node in next level is LEAF_DEVICE, put all devices in node entry into the list
+        deviceList.addAll(
+            node.getChildren().stream()
+                .map(MetadataIndexEntry::getName)
+                .collect(Collectors.toList()));
+      } else {
+        // keep traversing
+        deviceList.addAll(getAllDevices(node));
+      }
+    }
+    return deviceList;
+  }
+
   /**
    * read all ChunkMetaDatas of given device
    *
@@ -1128,6 +1310,27 @@ public class TsFileSequenceReader implements AutoCloseable {
     return buffer;
   }
 
+  protected ByteBuffer readData(long position, int size, TsFileInput tsFileInput)
+      throws IOException {
+    ByteBuffer buffer = ByteBuffer.allocate(size);
+    if (position < 0) {
+      if (ReadWriteIOUtils.readAsPossible(tsFileInput, buffer) != size) {
+        throw new IOException("reach the end of the data");
+      }
+    } else {
+      long actualReadSize = ReadWriteIOUtils.readAsPossible(tsFileInput, buffer, position, size);
+      if (actualReadSize != size) {
+        throw new IOException(
+            String.format(
+                "reach the end of the data. Size of data that want to read: %s,"
+                    + "actual read size: %s, position: %s",
+                size, actualReadSize, position));
+      }
+    }
+    buffer.flip();
+    return buffer;
+  }
+
   /**
    * read data from tsFileInput, from the current position (if position = -1), or the given
    * position.
@@ -1141,6 +1344,10 @@ public class TsFileSequenceReader implements AutoCloseable {
     return readData(start, (int) (end - start));
   }
 
+  protected ByteBuffer readData(long start, long end, TsFileInput tsFileInput) throws IOException {
+    return readData(start, (int) (end - start), tsFileInput);
+  }
+
   /** notice, the target bytebuffer are not flipped. */
   public int readRaw(long position, int length, ByteBuffer target) throws IOException {
     return ReadWriteIOUtils.readAsPossible(tsFileInput, target, position, length);
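For reference, a self-contained sketch of the trailer read that readFileMetadataV2() above performs on the companion index file: the last 8 bytes of "<name>.tsfile.index" hold the offset of the root MetadataIndexNode. The class name is illustrative, and the big-endian layout is assumed to match ReadWriteIOUtils:

    import java.io.IOException;
    import java.io.RandomAccessFile;

    public class IndexTrailer {
      /** Reads the root MetadataIndexNode offset stored in the last 8 bytes of the .index file. */
      public static long readRootNodeOffset(String indexFilePath) throws IOException {
        try (RandomAccessFile raf = new RandomAccessFile(indexFilePath, "r")) {
          raf.seek(raf.length() - Long.BYTES);  // trailer: one 8-byte offset at the very end
          return raf.readLong();
        }
      }
    }
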
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/IMetadataQuerier.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/IMetadataQuerier.java
index 90a1b2a..25efbad 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/IMetadataQuerier.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/IMetadataQuerier.java
@@ -47,6 +47,8 @@ public interface IMetadataQuerier {
 
   void loadChunkMetaDatasV2(List<Path> paths) throws IOException;
 
+  void loadChunkMetaDatasV3(List<Path> paths) throws IOException;
+
   /**
    * @return the corresponding data type.
    * @throws NoMeasurementException if the measurement not exists.
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/MetadataQuerierByFileImpl.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/MetadataQuerierByFileImpl.java
index 1b234f9..6f96550 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/MetadataQuerierByFileImpl.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/controller/MetadataQuerierByFileImpl.java
@@ -56,7 +56,8 @@ public class MetadataQuerierByFileImpl implements IMetadataQuerier {
   /** Constructor of MetadataQuerierByFileImpl. */
   public MetadataQuerierByFileImpl(TsFileSequenceReader tsFileReader) throws IOException {
     this.tsFileReader = tsFileReader;
-    this.fileMetaData = tsFileReader.readFileMetadata();
+    // FIXME
+    this.fileMetaData = tsFileReader.readFileMetadataV2();
     chunkMetaDataCache =
         new LRUCache<Path, List<ChunkMetadata>>(CACHED_ENTRY_NUMBER) {
           @Override
@@ -220,6 +221,71 @@ public class MetadataQuerierByFileImpl implements IMetadataQuerier {
     }
   }
 
+  public void loadChunkMetaDatasV3(List<Path> paths) throws IOException {
+    // group measurements by device
+    TreeMap<String, Set<String>> deviceMeasurementsMap = new TreeMap<>();
+    for (Path path : paths) {
+      if (!deviceMeasurementsMap.containsKey(path.getDevice())) {
+        deviceMeasurementsMap.put(path.getDevice(), new HashSet<>());
+      }
+      deviceMeasurementsMap.get(path.getDevice()).add(path.getMeasurement());
+    }
+
+    Map<Path, List<ChunkMetadata>> tempChunkMetaDatas = new HashMap<>();
+
+    int count = 0;
+    boolean enough = false;
+
+    for (Map.Entry<String, Set<String>> deviceMeasurements : deviceMeasurementsMap.entrySet()) {
+      if (enough) {
+        break;
+      }
+      String selectedDevice = deviceMeasurements.getKey();
+      // s1, s2, s3
+      Set<String> selectedMeasurements = deviceMeasurements.getValue();
+      List<String> devices = this.tsFileReader.getAllDevicesV2();
+      String[] deviceNames = devices.toArray(new String[0]);
+      if (Arrays.binarySearch(deviceNames, selectedDevice) < 0) {
+        continue;
+      }
+
+      List<TimeseriesMetadata> timeseriesMetaDataList =
+          tsFileReader.readTimeseriesMetadataV3(selectedDevice, selectedMeasurements);
+      List<ChunkMetadata> chunkMetadataList = new ArrayList<>();
+      for (TimeseriesMetadata timeseriesMetadata : timeseriesMetaDataList) {
+        chunkMetadataList.addAll(tsFileReader.readChunkMetaDataList(timeseriesMetadata));
+      }
+      // d1
+      for (ChunkMetadata chunkMetaData : chunkMetadataList) {
+        String currentMeasurement = chunkMetaData.getMeasurementUid();
+
+        // s1
+        if (selectedMeasurements.contains(currentMeasurement)) {
+
+          // d1.s1
+          Path path = new Path(selectedDevice, currentMeasurement);
+
+          // add into tempChunkMetaDatas
+          if (!tempChunkMetaDatas.containsKey(path)) {
+            tempChunkMetaDatas.put(path, new ArrayList<>());
+          }
+          tempChunkMetaDatas.get(path).add(chunkMetaData);
+
+          // check cache size, stop when reading enough
+          count++;
+          if (count == CACHED_ENTRY_NUMBER) {
+            enough = true;
+            break;
+          }
+        }
+      }
+    }
+
+    for (Map.Entry<Path, List<ChunkMetadata>> entry : tempChunkMetaDatas.entrySet()) {
+      chunkMetaDataCache.put(entry.getKey(), entry.getValue());
+    }
+  }
+
   @Override
   public TSDataType getDataType(Path path) throws IOException {
     if (tsFileReader.getChunkMetadataList(path) == null
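The first step of loadChunkMetaDatasV3 above, grouping the requested paths per device so each device's TimeseriesMetadata is fetched once via readTimeseriesMetadataV3, can be summarized in isolation (the helper and class names are illustrative):

    import org.apache.iotdb.tsfile.read.common.Path;

    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;
    import java.util.TreeMap;

    public class PathGrouping {
      /** Buckets measurements by device, keeping devices sorted as in loadChunkMetaDatasV3. */
      static Map<String, Set<String>> groupByDevice(List<Path> paths) {
        TreeMap<String, Set<String>> deviceMeasurementsMap = new TreeMap<>();
        for (Path path : paths) {
          deviceMeasurementsMap
              .computeIfAbsent(path.getDevice(), d -> new HashSet<>())
              .add(path.getMeasurement());
        }
        return deviceMeasurementsMap;
      }
    }
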
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/query/executor/TsFileExecutor.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/query/executor/TsFileExecutor.java
index 0cac002..b3b43a2 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/query/executor/TsFileExecutor.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/query/executor/TsFileExecutor.java
@@ -67,8 +67,10 @@ public class TsFileExecutor implements QueryExecutor {
       queryExpression.setSelectSeries(filteredSeriesPath);
     }
 
-    //    metadataQuerier.loadChunkMetaDatas(queryExpression.getSelectedSeries());
-    metadataQuerier.loadChunkMetaDatasV2(queryExpression.getSelectedSeries());
+    // metadataQuerier.loadChunkMetaDatas(queryExpression.getSelectedSeries());
+    // metadataQuerier.loadChunkMetaDatasV2(queryExpression.getSelectedSeries());
+    metadataQuerier.loadChunkMetaDatasV3(queryExpression.getSelectedSeries());
+
     if (queryExpression.hasQueryFilter()) {
       try {
         IExpression expression = queryExpression.getExpression();
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index 3c0185c..bba12dc 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -459,34 +459,38 @@ public class TsFileIOWriter {
     // NOTICE: update here, TsFileMetadataV2 does not have MetadataIndexTree
     // ====================== //
     TsFileMetadataV2 tsFileMetaData = new TsFileMetadataV2();
-    tsFileMetaData.setMetaOffset(metaOffset);
-
     TsFileOutput metadataIndexOutput =
         new LocalTsFileOutput(new FileOutputStream(new File(file.getAbsolutePath() + ".index")));
     MetadataIndexNode metadataIndex =
         flushMetadataIndex(chunkMetadataListMap, vectorToPathsMap, metadataIndexOutput);
-    int lastNodeSize = metadataIndex.serializeTo(metadataIndexOutput.wrapAsStream());
-
-    // write the size of last MetadataIndexNode
-    ReadWriteIOUtils.write(lastNodeSize, metadataIndexOutput.wrapAsStream());
-    metadataIndexOutput.close();
-    // ====================== //
+    tsFileMetaData.setMetadataIndex(metadataIndex);
 
+    long rootNodeOffset = metadataIndexOutput.getPosition();
     // write TsFileMetaData
-    int size = tsFileMetaData.serializeTo(out.wrapAsStream());
+    int size = tsFileMetaData.serializeTo(metadataIndexOutput.wrapAsStream());
     if (logger.isDebugEnabled()) {
-      logger.debug("finish flushing the footer {}, file pos:{}", tsFileMetaData, out.getPosition());
+      logger.debug(
+          "finish flushing the footer {}, file pos:{}",
+          tsFileMetaData,
+          metadataIndexOutput.getPosition());
     }
 
     // write bloom filter
-    size += tsFileMetaData.serializeBloomFilter(out.wrapAsStream(), chunkMetadataListMap.keySet());
+    size +=
+        tsFileMetaData.serializeBloomFilter(
+            metadataIndexOutput.wrapAsStream(), chunkMetadataListMap.keySet());
     if (logger.isDebugEnabled()) {
-      logger.debug("finish flushing the bloom filter file pos:{}", out.getPosition());
+      logger.debug(
+          "finish flushing the bloom filter file pos:{}", metadataIndexOutput.getPosition());
     }
 
     // write TsFileMetaData size
-    ReadWriteIOUtils.write(size, out.wrapAsStream()); // write the size of the file metadata.
+    ReadWriteIOUtils.write(
+        size, metadataIndexOutput.wrapAsStream()); // write the size of the file metadata.
+    ReadWriteIOUtils.write(rootNodeOffset, metadataIndexOutput.wrapAsStream());
 
+    metadataIndexOutput.close();
+    ReadWriteIOUtils.write(metaOffset, out.wrapAsStream());
     // write magic string
     out.write(MAGIC_STRING_BYTES);
 

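Taken together with readFileMetadataV2, the endFile() changes above imply the following layout for the companion index file: the MetadataIndex nodes, then the root node plus bloom filter (the serialized TsFileMetadataV2), then an int with that footer's size, and finally a long giving the root node's offset; the data file itself keeps only metaOffset and the magic string. A sketch of appending that trailer (method and class names are illustrative; big-endian primitives assumed, as in java.io):

    import java.io.DataOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class IndexTrailerWriter {
      /** Appends the [int size][long rootNodeOffset] trailer to an existing .index file. */
      static void appendTrailer(String indexFilePath, int fileMetadataSize, long rootNodeOffset)
          throws IOException {
        try (DataOutputStream out =
            new DataOutputStream(new FileOutputStream(indexFilePath, true))) {
          out.writeInt(fileMetadataSize);  // size of the serialized root node + bloom filter
          out.writeLong(rootNodeOffset);   // position where the root MetadataIndexNode starts
        }
      }
    }
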
[iotdb] 03/03: fix bugs

Posted by su...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sunzesong pushed a commit to branch experimental/index
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 8cf5af772b20dda505e954ed6dfbdfb810a2ef04
Author: samperson1997 <sz...@mails.tsinghua.edu.cn>
AuthorDate: Thu Nov 18 09:30:30 2021 +0800

    fix bugs
---
 example/tsfile/pom.xml                             |   2 +-
 .../iotdb/tsfile/test1835/TsFileAggregation.java   |  45 ++-
 .../iotdb/tsfile/test1835/TsFileAggregationV2.java |  47 ++-
 .../iotdb/tsfile/test1835/TsFileRawRead.java       |  47 ++-
 .../iotdb/tsfile/test1835/TsFileRawReadV2.java     |  51 ++-
 .../iotdb/tsfile/test1835/TsFileSketchToolV2.java  | 425 ---------------------
 .../apache/iotdb/tsfile/test1835/TsFileWrite.java  |  67 ++--
 .../{TsFileWrite.java => TsFileWriteV2.java}       |  69 ++--
 .../iotdb/tsfile/read/TsFileSequenceReader.java    | 155 ++++----
 .../tsfile/read/query/executor/TsFileExecutor.java |   3 +-
 .../apache/iotdb/tsfile/write/TsFileWriter.java    |   2 +-
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java  | 169 ++++----
 12 files changed, 375 insertions(+), 707 deletions(-)

diff --git a/example/tsfile/pom.xml b/example/tsfile/pom.xml
index ce21451..45a9e33 100644
--- a/example/tsfile/pom.xml
+++ b/example/tsfile/pom.xml
@@ -51,7 +51,7 @@
                         <configuration>
                             <archive>
                                 <manifest>
-                                    <mainClass>org.apache.iotdb.tsfile.test1831.TsFileAggregation</mainClass>
+                                    <mainClass>org.apache.iotdb.tsfile.test1835.TsFileAggregationV2</mainClass>
                                 </manifest>
                             </archive>
                             <descriptorRefs>
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregation.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregation.java
index b4de97d..343d2ce 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregation.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregation.java
@@ -24,30 +24,38 @@ import org.apache.iotdb.tsfile.read.common.Path;
 
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 
 import java.io.IOException;
 
 public class TsFileAggregation {
 
-  private static final String DEVICE1 = "device_";
-  public static int chunkNum;
-  public static int deviceNum = 1;
-  public static int sensorNum = 1;
-  public static int fileNum = 1;
+  private static final String DEVICE1 = "device_1";
+  public static int deviceNum;
+  public static int sensorNum;
+  public static int fileNum;
 
   public static void main(String[] args) throws IOException {
-    long costTime = 0L;
     Options opts = new Options();
-    //    Option chunkNumOption =
-    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
-    //    opts.addOption(chunkNumOption);
+    Option deviceNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("deviceNum").hasArg().create("d");
+    opts.addOption(deviceNumOption);
+    Option sensorNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("sensorNum").hasArg().create("m");
+    opts.addOption(sensorNumOption);
+    Option fileNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("fileNum").hasArg().create("f");
+    opts.addOption(fileNumOption);
 
     BasicParser parser = new BasicParser();
     CommandLine cl;
     try {
       cl = parser.parse(opts, args);
-      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+      deviceNum = Integer.parseInt(cl.getOptionValue("d"));
+      sensorNum = Integer.parseInt(cl.getOptionValue("m"));
+      fileNum = Integer.parseInt(cl.getOptionValue("f"));
     } catch (Exception e) {
       e.printStackTrace();
     }
@@ -56,22 +64,23 @@ public class TsFileAggregation {
     for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
       // file path
       String path =
-          "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+          "/data/szs/data/data/sequence/root.sg/1/"
               + deviceNum
-              + "/test1.tsfile";
+              + "."
+              + sensorNum
+              + "/test"
+              + fileIndex
+              + ".tsfile";
 
       // aggregation query
       try (TsFileSequenceReader reader = new TsFileSequenceReader(path)) {
         Path seriesPath = new Path(DEVICE1, "sensor_1");
-        long startTime = System.nanoTime();
         TimeseriesMetadata timeseriesMetadata = reader.readTimeseriesMetadata(seriesPath, false);
         long count = timeseriesMetadata.getStatistics().getCount();
-        costTime += (System.nanoTime() - startTime);
-        System.out.println(count);
       }
     }
-    System.out.println(
-        "Total raw read cost time: " + (System.nanoTime() - totalStartTime) / 1000_000 + "ms");
-    System.out.println("Index area cost time: " + costTime / 1000_000 + "ms");
+    long totalTime = (System.nanoTime() - totalStartTime) / 1000_000;
+    System.out.println("Total raw read cost time: " + totalTime + "ms");
+    System.out.println("Average cost time: " + (double) totalTime / (double) fileNum + "ms");
   }
 }
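With the -d/-m/-f options wired up, the benchmark can now be launched with explicit device, sensor, and file counts, for example (argument values and the jar name are illustrative only):

    java -cp tsfile-example.jar org.apache.iotdb.tsfile.test1835.TsFileAggregation -d 10 -m 100 -f 5

which would read /data/szs/data/data/sequence/root.sg/1/10.100/test0.tsfile through test4.tsfile.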
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregationV2.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregationV2.java
index 8681849..b9b52bf 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregationV2.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileAggregationV2.java
@@ -24,30 +24,38 @@ import org.apache.iotdb.tsfile.read.common.Path;
 
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 
 import java.io.IOException;
 
 public class TsFileAggregationV2 {
 
-  private static final String DEVICE1 = "device_";
-  public static int chunkNum;
-  public static int deviceNum = 1;
-  public static int sensorNum = 1;
-  public static int fileNum = 1;
+  private static final String DEVICE1 = "device_1";
+  public static int deviceNum;
+  public static int sensorNum;
+  public static int fileNum;
 
   public static void main(String[] args) throws IOException {
-    long costTime = 0L;
     Options opts = new Options();
-    //    Option chunkNumOption =
-    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
-    //    opts.addOption(chunkNumOption);
+    Option deviceNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("deviceNum").hasArg().create("d");
+    opts.addOption(deviceNumOption);
+    Option sensorNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("sensorNum").hasArg().create("m");
+    opts.addOption(sensorNumOption);
+    Option fileNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("fileNum").hasArg().create("f");
+    opts.addOption(fileNumOption);
 
     BasicParser parser = new BasicParser();
     CommandLine cl;
     try {
       cl = parser.parse(opts, args);
-      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+      deviceNum = Integer.parseInt(cl.getOptionValue("d"));
+      sensorNum = Integer.parseInt(cl.getOptionValue("m"));
+      fileNum = Integer.parseInt(cl.getOptionValue("f"));
     } catch (Exception e) {
       e.printStackTrace();
     }
@@ -56,22 +64,23 @@ public class TsFileAggregationV2 {
     for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
       // file path
       String path =
-          "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+          "/data/szs/data/data/sequence/root.sg/0/"
               + deviceNum
-              + "/test0.tsfile";
+              + "."
+              + sensorNum
+              + "/test"
+              + fileIndex
+              + ".tsfile";
 
       // aggregation query
-      try (TsFileSequenceReader reader = new TsFileSequenceReader(path)) {
+      try (TsFileSequenceReader reader = new TsFileSequenceReader(path, false)) {
         Path seriesPath = new Path(DEVICE1, "sensor_1");
-        long startTime = System.nanoTime();
         TimeseriesMetadata timeseriesMetadata = reader.readTimeseriesMetadataV4(seriesPath, false);
         long count = timeseriesMetadata.getStatistics().getCount();
-        costTime += (System.nanoTime() - startTime);
-        System.out.println(count);
       }
     }
-    System.out.println(
-        "Total raw read cost time: " + (System.nanoTime() - totalStartTime) / 1000_000 + "ms");
-    System.out.println("Index area cost time: " + costTime / 1000_000 + "ms");
+    long totalTime = (System.nanoTime() - totalStartTime) / 1000_000;
+    System.out.println("Total raw read cost time: " + totalTime + "ms");
+    System.out.println("Average cost time: " + (double) totalTime / (double) fileNum + "ms");
   }
 }
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawRead.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawRead.java
index 11d54b2..b3d1646 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawRead.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawRead.java
@@ -26,6 +26,8 @@ import org.apache.iotdb.tsfile.read.query.dataset.QueryDataSet;
 
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 
 import java.io.IOException;
@@ -33,24 +35,30 @@ import java.util.ArrayList;
 
 public class TsFileRawRead {
 
-  private static final String DEVICE1 = "device_";
-  public static int chunkNum;
-  public static int deviceNum = 1;
-  public static int sensorNum = 1;
-  public static int fileNum = 1;
+  private static final String DEVICE1 = "device_1";
+  public static int deviceNum;
+  public static int sensorNum;
+  public static int fileNum;
 
   public static void main(String[] args) throws IOException {
-    long costTime = 0L;
     Options opts = new Options();
-    //    Option chunkNumOption =
-    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
-    //    opts.addOption(chunkNumOption);
+    Option deviceNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("deviceNum").hasArg().create("d");
+    opts.addOption(deviceNumOption);
+    Option sensorNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("sensorNum").hasArg().create("m");
+    opts.addOption(sensorNumOption);
+    Option fileNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("fileNum").hasArg().create("f");
+    opts.addOption(fileNumOption);
 
     BasicParser parser = new BasicParser();
     CommandLine cl;
     try {
       cl = parser.parse(opts, args);
-      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+      deviceNum = Integer.parseInt(cl.getOptionValue("d"));
+      sensorNum = Integer.parseInt(cl.getOptionValue("m"));
+      fileNum = Integer.parseInt(cl.getOptionValue("f"));
     } catch (Exception e) {
       e.printStackTrace();
     }
@@ -59,9 +67,13 @@ public class TsFileRawRead {
     for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
       // file path
       String path =
-          "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+          "/data/szs/data/data/sequence/root.sg/1/"
               + deviceNum
-              + "/test1.tsfile";
+              + "."
+              + sensorNum
+              + "/test"
+              + fileIndex
+              + ".tsfile";
 
       // raw data query
       try (TsFileSequenceReader reader = new TsFileSequenceReader(path);
@@ -72,17 +84,14 @@ public class TsFileRawRead {
 
         QueryExpression queryExpression = QueryExpression.create(paths, null);
 
-        long startTime = System.nanoTime();
         QueryDataSet queryDataSet = readTsFile.query(queryExpression);
         while (queryDataSet.hasNext()) {
-          System.out.println(queryDataSet.next());
+          queryDataSet.next();
         }
-
-        costTime += (System.nanoTime() - startTime);
       }
     }
-    System.out.println(
-        "Total raw read cost time: " + (System.nanoTime() - totalStartTime) / 1000_000 + "ms");
-    System.out.println("Index area cost time: " + costTime / 1000_000 + "ms");
+    long totalTime = (System.nanoTime() - totalStartTime) / 1000_000;
+    System.out.println("Total raw read cost time: " + totalTime + "ms");
+    System.out.println("Average cost time: " + (double) totalTime / (double) fileNum + "ms");
   }
 }
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawReadV2.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawReadV2.java
index 2f898dc..96b2d59 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawReadV2.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileRawReadV2.java
@@ -26,6 +26,8 @@ import org.apache.iotdb.tsfile.read.query.dataset.QueryDataSet;
 
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 
 import java.io.IOException;
@@ -33,38 +35,48 @@ import java.util.ArrayList;
 
 public class TsFileRawReadV2 {
 
-  private static final String DEVICE1 = "device_";
-  public static int chunkNum;
-  public static int deviceNum = 1;
-  public static int sensorNum = 1;
-  public static int fileNum = 1;
+  private static final String DEVICE1 = "device_1";
+  public static int deviceNum;
+  public static int sensorNum;
+  public static int fileNum;
 
   public static void main(String[] args) throws IOException {
-    long costTime = 0L;
-    long totalStartTime = System.nanoTime();
     Options opts = new Options();
-    //    Option chunkNumOption =
-    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
-    //    opts.addOption(chunkNumOption);
+    Option deviceNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("deviceNum").hasArg().create("d");
+    opts.addOption(deviceNumOption);
+    Option sensorNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("sensorNum").hasArg().create("m");
+    opts.addOption(sensorNumOption);
+    Option fileNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("fileNum").hasArg().create("f");
+    opts.addOption(fileNumOption);
 
     BasicParser parser = new BasicParser();
     CommandLine cl;
     try {
       cl = parser.parse(opts, args);
-      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+      deviceNum = Integer.parseInt(cl.getOptionValue("d"));
+      sensorNum = Integer.parseInt(cl.getOptionValue("m"));
+      fileNum = Integer.parseInt(cl.getOptionValue("f"));
     } catch (Exception e) {
       e.printStackTrace();
     }
 
+    long totalStartTime = System.nanoTime();
     for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
       // file path
       String path =
-          "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+          "/data/szs/data/data/sequence/root.sg/0/"
               + deviceNum
-              + "/test0.tsfile";
+              + "."
+              + sensorNum
+              + "/test"
+              + fileIndex
+              + ".tsfile";
 
       // raw data query
-      try (TsFileSequenceReader reader = new TsFileSequenceReader(path);
+      try (TsFileSequenceReader reader = new TsFileSequenceReader(path, false);
           ReadOnlyTsFile readTsFile = new ReadOnlyTsFile(reader)) {
 
         ArrayList<Path> paths = new ArrayList<>();
@@ -72,17 +84,14 @@ public class TsFileRawReadV2 {
 
         QueryExpression queryExpression = QueryExpression.create(paths, null);
 
-        long startTime = System.nanoTime();
         QueryDataSet queryDataSet = readTsFile.query(queryExpression);
         while (queryDataSet.hasNext()) {
-          System.out.println(queryDataSet.next());
+          queryDataSet.next();
         }
-
-        costTime += (System.nanoTime() - startTime);
       }
     }
-    System.out.println(
-        "Total raw read cost time: " + (System.nanoTime() - totalStartTime) / 1000_000 + "ms");
-    System.out.println("Index area cost time: " + costTime / 1000_000 + "ms");
+    long totalTime = (System.nanoTime() - totalStartTime) / 1000_000;
+    System.out.println("Total raw read cost time: " + totalTime + "ms");
+    System.out.println("Average cost time: " + (double) totalTime / (double) fileNum + "ms");
   }
 }
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java
deleted file mode 100644
index c36a085..0000000
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java
+++ /dev/null
@@ -1,425 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.iotdb.tsfile.test1835;
-
-import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
-import org.apache.iotdb.tsfile.file.MetaMarker;
-import org.apache.iotdb.tsfile.file.header.ChunkGroupHeader;
-import org.apache.iotdb.tsfile.file.header.PageHeader;
-import org.apache.iotdb.tsfile.file.metadata.ChunkGroupMetadata;
-import org.apache.iotdb.tsfile.file.metadata.ChunkMetadata;
-import org.apache.iotdb.tsfile.file.metadata.IChunkMetadata;
-import org.apache.iotdb.tsfile.file.metadata.MetadataIndexEntry;
-import org.apache.iotdb.tsfile.file.metadata.MetadataIndexNode;
-import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
-import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
-import org.apache.iotdb.tsfile.file.metadata.enums.MetadataIndexNodeType;
-import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
-import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
-import org.apache.iotdb.tsfile.read.common.Chunk;
-import org.apache.iotdb.tsfile.read.common.Path;
-import org.apache.iotdb.tsfile.utils.BloomFilter;
-import org.apache.iotdb.tsfile.utils.Pair;
-
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-public class TsFileSketchToolV2 {
-
-  private String filename;
-  private String indexFileName;
-  private PrintWriter pw;
-  private TsFileSketchToolReader reader;
-  private TsFileSketchToolReader indexReader;
-  private String splitStr; // for split different part of TsFile
-
-  public static void main(String[] args) throws IOException {
-    String filename = "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/1/test0.tsfile";
-    String outFile = "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/1/test0.txt";
-    String indexFileName =
-        "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/1/test0.tsfile.index";
-
-    new TsFileSketchToolV2(filename, indexFileName, outFile).run();
-  }
-
-  /**
-   * construct TsFileSketchTool
-   *
-   * @param filename input file path
-   * @param indexFileName index file path
-   * @param outFile output file path
-   */
-  public TsFileSketchToolV2(String filename, String indexFileName, String outFile) {
-    try {
-      this.filename = filename;
-      this.indexFileName = indexFileName;
-      pw = new PrintWriter(new FileWriter(outFile));
-      reader = new TsFileSketchToolReader(filename);
-      indexReader = new TsFileSketchToolReader(indexFileName);
-      StringBuilder str1 = new StringBuilder();
-      for (int i = 0; i < 21; i++) {
-        str1.append("|");
-      }
-      splitStr = str1.toString();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  /** entry of tool */
-  public void run() throws IOException {
-    long length = FSFactoryProducer.getFSFactory().getFile(filename).length();
-    printlnBoth(
-        pw, "-------------------------------- TsFile Sketch --------------------------------");
-    printlnBoth(pw, "file path: " + filename);
-    printlnBoth(pw, "file length: " + length);
-
-    // get metadata information
-    TsFileMetadata tsFileMetaData = reader.readFileMetadataV2();
-    List<ChunkGroupMetadata> allChunkGroupMetadata = new ArrayList<>();
-    reader.selfCheck(null, allChunkGroupMetadata, false);
-
-    // print file information
-    printFileInfo();
-
-    // print chunk
-    printChunk(allChunkGroupMetadata);
-
-    // metadata begins
-    if (tsFileMetaData.getMetadataIndex().getChildren().isEmpty()) {
-      printlnBoth(pw, String.format("%20s", reader.getFileMetadataPos() - 1) + "|\t[marker] 2");
-    } else {
-      printlnBoth(
-          pw, String.format("%20s", reader.readFileMetadata().getMetaOffset()) + "|\t[marker] 2");
-    }
-    // get all timeseries index
-    Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap =
-        reader.getAllTimeseriesMetadataWithOffset();
-
-    // print timeseries index
-    printTimeseriesIndex(timeseriesMetadataMap);
-
-    // print TsFile Metadata
-    printTsFileMetadata(tsFileMetaData);
-
-    printlnBoth(pw, String.format("%20s", length) + "|\tEND of TsFile");
-    printlnBoth(
-        pw,
-        "---------------------------- IndexOfTimerseriesIndex Tree -----------------------------");
-    // print index tree
-    MetadataIndexNode metadataIndexNode = tsFileMetaData.getMetadataIndex();
-    TreeMap<Long, MetadataIndexNode> metadataIndexNodeMap = new TreeMap<>();
-    List<String> treeOutputStringBuffer = new ArrayList<>();
-    loadIndexTree(metadataIndexNode, metadataIndexNodeMap, treeOutputStringBuffer, 0);
-
-    // print IndexOfTimerseriesIndex
-    printIndexOfTimerseriesIndex(metadataIndexNodeMap);
-
-    for (String str : treeOutputStringBuffer) {
-      printlnBoth(pw, str);
-    }
-    printlnBoth(
-        pw,
-        "---------------------------------- TsFile Sketch End ----------------------------------");
-    pw.close();
-  }
-
-  private void printTsFileMetadata(TsFileMetadata tsFileMetaData) {
-    try {
-      printlnBoth(pw, String.format("%20s", reader.getFileMetadataPos()) + "|\t[TsFileMetadata]");
-      printlnBoth(
-          pw, String.format("%20s", "") + "|\t\t[meta offset] " + tsFileMetaData.getMetaOffset());
-      // bloom filter
-      BloomFilter bloomFilter = tsFileMetaData.getBloomFilter();
-      printlnBoth(
-          pw,
-          String.format("%20s", "")
-              + "|\t\t[bloom filter bit vector byte array length] "
-              + bloomFilter.serialize().length);
-      printlnBoth(pw, String.format("%20s", "") + "|\t\t[bloom filter bit vector byte array] ");
-      printlnBoth(
-          pw,
-          String.format("%20s", "")
-              + "|\t\t[bloom filter number of bits] "
-              + bloomFilter.getSize());
-      printlnBoth(
-          pw,
-          String.format("%20s", "")
-              + "|\t\t[bloom filter number of hash functions] "
-              + bloomFilter.getHashFunctionSize());
-
-      printlnBoth(
-          pw,
-          String.format("%20s", (reader.getFileMetadataPos() + reader.getFileMetadataSize()))
-              + "|\t[TsFileMetadataSize] "
-              + reader.getFileMetadataSize());
-
-      printlnBoth(
-          pw,
-          String.format("%20s", reader.getFileMetadataPos() + reader.getFileMetadataSize() + 4)
-              + "|\t[magic tail] "
-              + reader.readTailMagic());
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  private void printIndexOfTimerseriesIndex(TreeMap<Long, MetadataIndexNode> metadataIndexNodeMap) {
-    for (Map.Entry<Long, MetadataIndexNode> entry : metadataIndexNodeMap.entrySet()) {
-      printlnBoth(
-          pw,
-          String.format("%20s", entry.getKey())
-              + "|\t[IndexOfTimerseriesIndex Node] type="
-              + entry.getValue().getNodeType());
-      for (MetadataIndexEntry metadataIndexEntry : entry.getValue().getChildren()) {
-        printlnBoth(
-            pw,
-            String.format("%20s", "")
-                + "|\t\t<"
-                + metadataIndexEntry.getName()
-                + ", "
-                + metadataIndexEntry.getOffset()
-                + ">");
-      }
-      printlnBoth(
-          pw,
-          String.format("%20s", "") + "|\t\t<endOffset, " + entry.getValue().getEndOffset() + ">");
-    }
-  }
-
-  private void printFileInfo() {
-    try {
-      printlnBoth(pw, "");
-      printlnBoth(pw, String.format("%20s", "POSITION") + "|\tCONTENT");
-      printlnBoth(pw, String.format("%20s", "--------") + " \t-------");
-      printlnBoth(pw, String.format("%20d", 0) + "|\t[magic head] " + reader.readHeadMagic());
-      printlnBoth(
-          pw,
-          String.format("%20d", TSFileConfig.MAGIC_STRING.getBytes().length)
-              + "|\t[version number] "
-              + reader.readVersionNumber());
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  private void printChunk(List<ChunkGroupMetadata> allChunkGroupMetadata) {
-    try {
-      long nextChunkGroupHeaderPos =
-          (long) TSFileConfig.MAGIC_STRING.getBytes().length + Byte.BYTES;
-      // ChunkGroup begins
-      for (ChunkGroupMetadata chunkGroupMetadata : allChunkGroupMetadata) {
-        printlnBoth(
-            pw,
-            splitStr
-                + "\t[Chunk Group] of "
-                + chunkGroupMetadata.getDevice()
-                + ", num of Chunks:"
-                + chunkGroupMetadata.getChunkMetadataList().size());
-        // chunkGroupHeader begins
-        printlnBoth(pw, String.format("%20s", nextChunkGroupHeaderPos) + "|\t[Chunk Group Header]");
-        ChunkGroupHeader chunkGroupHeader =
-            reader.readChunkGroupHeader(nextChunkGroupHeaderPos, false);
-        printlnBoth(pw, String.format("%20s", "") + "|\t\t[marker] 0");
-        printlnBoth(
-            pw, String.format("%20s", "") + "|\t\t[deviceID] " + chunkGroupHeader.getDeviceID());
-        // chunk begins
-        for (ChunkMetadata chunkMetadata : chunkGroupMetadata.getChunkMetadataList()) {
-          Chunk chunk = reader.readMemChunk(chunkMetadata);
-          printlnBoth(
-              pw,
-              String.format("%20d", chunkMetadata.getOffsetOfChunkHeader())
-                  + "|\t[Chunk] of "
-                  + chunkMetadata.getMeasurementUid()
-                  + ", numOfPoints:"
-                  + chunkMetadata.getNumOfPoints()
-                  + ", time range:["
-                  + chunkMetadata.getStartTime()
-                  + ","
-                  + chunkMetadata.getEndTime()
-                  + "], tsDataType:"
-                  + chunkMetadata.getDataType()
-                  + ", \n"
-                  + String.format("%20s", "")
-                  + " \t"
-                  + chunkMetadata.getStatistics());
-          printlnBoth(
-              pw,
-              String.format("%20s", "")
-                  + "|\t\t[chunk header] "
-                  + "marker="
-                  + chunk.getHeader().getChunkType()
-                  + ", measurementId="
-                  + chunk.getHeader().getMeasurementID()
-                  + ", dataSize="
-                  + chunk.getHeader().getDataSize()
-                  + ", serializedSize="
-                  + chunk.getHeader().getSerializedSize());
-
-          printlnBoth(pw, String.format("%20s", "") + "|\t\t[chunk] " + chunk.getData());
-          PageHeader pageHeader;
-          if (((byte) (chunk.getHeader().getChunkType() & 0x3F))
-              == MetaMarker.ONLY_ONE_PAGE_CHUNK_HEADER) {
-            pageHeader = PageHeader.deserializeFrom(chunk.getData(), chunkMetadata.getStatistics());
-          } else {
-            pageHeader =
-                PageHeader.deserializeFrom(chunk.getData(), chunk.getHeader().getDataType());
-          }
-          printlnBoth(
-              pw,
-              String.format("%20s", "")
-                  + "|\t\t[page] "
-                  + " CompressedSize:"
-                  + pageHeader.getCompressedSize()
-                  + ", UncompressedSize:"
-                  + pageHeader.getUncompressedSize());
-          nextChunkGroupHeaderPos =
-              chunkMetadata.getOffsetOfChunkHeader()
-                  + chunk.getHeader().getSerializedSize()
-                  + chunk.getHeader().getDataSize();
-        }
-        reader.position(nextChunkGroupHeaderPos);
-        byte marker = reader.readMarker();
-        switch (marker) {
-          case MetaMarker.CHUNK_GROUP_HEADER:
-            // do nothing
-            break;
-          case MetaMarker.OPERATION_INDEX_RANGE:
-            // skip the PlanIndex
-            nextChunkGroupHeaderPos += 16;
-            break;
-        }
-
-        printlnBoth(
-            pw, splitStr + "\t[Chunk Group] of " + chunkGroupMetadata.getDevice() + " ends");
-      }
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  private void printTimeseriesIndex(
-      Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap) {
-    try {
-      for (Map.Entry<Long, Pair<Path, TimeseriesMetadata>> entry :
-          timeseriesMetadataMap.entrySet()) {
-        printlnBoth(
-            pw,
-            String.format("%20s", entry.getKey())
-                + "|\t[TimeseriesIndex] of "
-                + entry.getValue().left
-                + ", tsDataType:"
-                + entry.getValue().right.getTSDataType());
-        for (IChunkMetadata chunkMetadata :
-            reader.getChunkMetadataListV3(entry.getValue().left, false)) {
-          printlnBoth(
-              pw,
-              String.format("%20s", "")
-                  + "|\t\t[ChunkIndex] "
-                  + chunkMetadata.getMeasurementUid()
-                  + ", offset="
-                  + chunkMetadata.getOffsetOfChunkHeader());
-        }
-        printlnBoth(
-            pw,
-            String.format("%20s", "") + "|\t\t[" + entry.getValue().right.getStatistics() + "] ");
-      }
-      printlnBoth(pw, splitStr);
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * load by dfs, and sort by TreeMap
-   *
-   * @param metadataIndexNode current node
-   * @param metadataIndexNodeMap result map, key is offset
-   * @param treeOutputStringBuffer result list, string is index tree
-   * @param deep current deep
-   */
-  private void loadIndexTree(
-      MetadataIndexNode metadataIndexNode,
-      TreeMap<Long, MetadataIndexNode> metadataIndexNodeMap,
-      List<String> treeOutputStringBuffer,
-      int deep)
-      throws IOException {
-    StringBuilder tableWriter = new StringBuilder("\t");
-    for (int i = 0; i < deep; i++) {
-      tableWriter.append("\t\t");
-    }
-    treeOutputStringBuffer.add(
-        tableWriter.toString() + "[MetadataIndex:" + metadataIndexNode.getNodeType() + "]");
-    for (int i = 0; i < metadataIndexNode.getChildren().size(); i++) {
-      MetadataIndexEntry metadataIndexEntry = metadataIndexNode.getChildren().get(i);
-
-      treeOutputStringBuffer.add(
-          tableWriter.toString()
-              + "└──────["
-              + metadataIndexEntry.getName()
-              + ","
-              + metadataIndexEntry.getOffset()
-              + "]");
-      if (!metadataIndexNode.getNodeType().equals(MetadataIndexNodeType.LEAF_MEASUREMENT)) {
-        long endOffset = metadataIndexNode.getEndOffset();
-        if (i != metadataIndexNode.getChildren().size() - 1) {
-          endOffset = metadataIndexNode.getChildren().get(i + 1).getOffset();
-        }
-        MetadataIndexNode subNode =
-            indexReader.getMetadataIndexNode(metadataIndexEntry.getOffset(), endOffset);
-        metadataIndexNodeMap.put(metadataIndexEntry.getOffset(), subNode);
-        loadIndexTree(subNode, metadataIndexNodeMap, treeOutputStringBuffer, deep + 1);
-      }
-    }
-  }
-
-  private void printlnBoth(PrintWriter pw, String str) {
-    System.out.println(str);
-    pw.println(str);
-  }
-
-  private class TsFileSketchToolReader extends TsFileSequenceReader {
-    public TsFileSketchToolReader(String file) throws IOException {
-      super(file);
-    }
-
-    public Map<Long, Pair<Path, TimeseriesMetadata>> getAllTimeseriesMetadataWithOffset()
-        throws IOException {
-      Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap = new TreeMap<>();
-
-      // FIXME
-      ByteBuffer buffer = readData(0, 0);
-      while (buffer.hasRemaining()) {
-        int bufferPos = buffer.position();
-        TimeseriesMetadata timeseriesMetaData = TimeseriesMetadata.deserializeFrom(buffer, false);
-        timeseriesMetadataMap.put(
-            reader.position() + bufferPos,
-            new Pair<>(new Path("d1", timeseriesMetaData.getMeasurementId()), timeseriesMetaData));
-      }
-      return timeseriesMetadataMap;
-    }
-  }
-}
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java
index aa087b4..d6c64d5 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java
@@ -32,6 +32,8 @@ import org.apache.iotdb.tsfile.write.schema.UnaryMeasurementSchema;
 
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 
 import java.io.File;
@@ -42,21 +44,30 @@ import java.util.Random;
  * addMeasurement(MeasurementSchema measurementSchema) throws WriteProcessException
  */
 public class TsFileWrite {
-  public static int deviceNum = 1;
-  public static int sensorNum = 1;
-  public static int fileNum = 1;
+  public static int deviceNum;
+  public static int sensorNum;
+  public static int fileNum;
+  public static int pointNum = 100;
 
   public static void main(String[] args) {
     Options opts = new Options();
-    //    Option chunkNumOption =
-    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
-    //    opts.addOption(chunkNumOption);
+    Option deviceNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("deviceNum").hasArg().create("d");
+    opts.addOption(deviceNumOption);
+    Option sensorNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("sensorNum").hasArg().create("m");
+    opts.addOption(sensorNumOption);
+    Option fileNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("fileNum").hasArg().create("f");
+    opts.addOption(fileNumOption);
 
     BasicParser parser = new BasicParser();
     CommandLine cl;
     try {
       cl = parser.parse(opts, args);
-      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+      deviceNum = Integer.parseInt(cl.getOptionValue("d"));
+      sensorNum = Integer.parseInt(cl.getOptionValue("m"));
+      fileNum = Integer.parseInt(cl.getOptionValue("f"));
     } catch (Exception e) {
       e.printStackTrace();
     }
@@ -64,10 +75,12 @@ public class TsFileWrite {
     for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
       try {
         String path =
-            "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+            "/data/szs/data/data/sequence/root.sg/0/"
                 + deviceNum
+                + "."
+                + sensorNum
                 + "/test"
-                + 0
+                + fileIndex
                 + ".tsfile";
         File f = FSFactoryProducer.getFSFactory().getFile(path);
         if (f.exists()) {
@@ -76,23 +89,29 @@ public class TsFileWrite {
 
         try {
           TsFileWriter tsFileWriter = new TsFileWriter(f);
-          // 1000 timeseries
-          for (int i = 1; i <= 1000; i++) {
-            tsFileWriter.registerTimeseries(
-                new Path(Constant.DEVICE_PREFIX, Constant.SENSOR_ + i),
-                new UnaryMeasurementSchema(Constant.SENSOR_ + i, TSDataType.INT64, TSEncoding.RLE));
+          for (int i = 1; i <= deviceNum; i++) {
+            for (int j = 1; j <= sensorNum; j++) {
+              Path path1 = new Path(Constant.DEVICE_PREFIX + i, Constant.SENSOR_ + j);
+              tsFileWriter.registerTimeseries(
+                  path1,
+                  new UnaryMeasurementSchema(
+                      Constant.SENSOR_ + j, TSDataType.INT64, TSEncoding.RLE));
+            }
           }
           // construct TSRecord
-          for (int i = 1; i <= 100; i++) {
-            TSRecord tsRecord = new TSRecord(i, Constant.DEVICE_PREFIX);
-            for (int t = 1; t <= 1000; t++) {
-              DataPoint dPoint1 = new LongDataPoint(Constant.SENSOR_ + t, new Random().nextLong());
-              tsRecord.addTuple(dPoint1);
-            }
-            // write TSRecord
-            tsFileWriter.write(tsRecord);
-            if (i % 100 == 0) {
-              tsFileWriter.flushAllChunkGroups();
+          for (int j = 1; j <= deviceNum; j++) {
+            for (int i = 1; i <= pointNum; i++) {
+              TSRecord tsRecord = new TSRecord(i, Constant.DEVICE_PREFIX + j);
+              for (int t = 1; t <= sensorNum; t++) {
+                DataPoint dPoint1 =
+                    new LongDataPoint(Constant.SENSOR_ + t, new Random().nextLong());
+                tsRecord.addTuple(dPoint1);
+              }
+              // write TSRecord
+              tsFileWriter.write(tsRecord);
+              if (i % 100 == 0) {
+                tsFileWriter.flushAllChunkGroups();
+              }
             }
           }
           tsFileWriter.close();
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWriteV2.java
similarity index 59%
copy from example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java
copy to example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWriteV2.java
index aa087b4..ffc8cc0 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWriteV2.java
@@ -32,6 +32,8 @@ import org.apache.iotdb.tsfile.write.schema.UnaryMeasurementSchema;
 
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 
 import java.io.File;
@@ -41,22 +43,31 @@ import java.util.Random;
  * An example of writing data with TSRecord to TsFile It uses the interface: public void
  * addMeasurement(MeasurementSchema measurementSchema) throws WriteProcessException
  */
-public class TsFileWrite {
-  public static int deviceNum = 1;
-  public static int sensorNum = 1;
-  public static int fileNum = 1;
+public class TsFileWriteV2 {
+  public static int deviceNum;
+  public static int sensorNum;
+  public static int fileNum;
+  public static int pointNum = 100;
 
   public static void main(String[] args) {
     Options opts = new Options();
-    //    Option chunkNumOption =
-    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
-    //    opts.addOption(chunkNumOption);
+    Option deviceNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("deviceNum").hasArg().create("d");
+    opts.addOption(deviceNumOption);
+    Option sensorNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("sensorNum").hasArg().create("m");
+    opts.addOption(sensorNumOption);
+    Option fileNumOption =
+        OptionBuilder.withArgName("args").withLongOpt("fileNum").hasArg().create("f");
+    opts.addOption(fileNumOption);
 
     BasicParser parser = new BasicParser();
     CommandLine cl;
     try {
       cl = parser.parse(opts, args);
-      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+      deviceNum = 1000; // Integer.parseInt(cl.getOptionValue("d"));
+      sensorNum = 10; // Integer.parseInt(cl.getOptionValue("m"));
+      fileNum = 1; // Integer.parseInt(cl.getOptionValue("f"));
     } catch (Exception e) {
       e.printStackTrace();
     }
@@ -64,10 +75,12 @@ public class TsFileWrite {
     for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
       try {
         String path =
-            "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+            "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/0/"
                 + deviceNum
+                + "."
+                + sensorNum
                 + "/test"
-                + 0
+                + fileIndex
                 + ".tsfile";
         File f = FSFactoryProducer.getFSFactory().getFile(path);
         if (f.exists()) {
@@ -76,23 +89,29 @@ public class TsFileWrite {
 
         try {
           TsFileWriter tsFileWriter = new TsFileWriter(f);
-          // 1000 timeseries
-          for (int i = 1; i <= 1000; i++) {
-            tsFileWriter.registerTimeseries(
-                new Path(Constant.DEVICE_PREFIX, Constant.SENSOR_ + i),
-                new UnaryMeasurementSchema(Constant.SENSOR_ + i, TSDataType.INT64, TSEncoding.RLE));
+          for (int i = 1; i <= deviceNum; i++) {
+            for (int j = 1; j <= sensorNum; j++) {
+              Path path1 = new Path(Constant.DEVICE_PREFIX + i, Constant.SENSOR_ + j);
+              tsFileWriter.registerTimeseries(
+                  path1,
+                  new UnaryMeasurementSchema(
+                      Constant.SENSOR_ + j, TSDataType.INT64, TSEncoding.RLE));
+            }
           }
           // construct TSRecord
-          for (int i = 1; i <= 100; i++) {
-            TSRecord tsRecord = new TSRecord(i, Constant.DEVICE_PREFIX);
-            for (int t = 1; t <= 1000; t++) {
-              DataPoint dPoint1 = new LongDataPoint(Constant.SENSOR_ + t, new Random().nextLong());
-              tsRecord.addTuple(dPoint1);
-            }
-            // write TSRecord
-            tsFileWriter.write(tsRecord);
-            if (i % 100 == 0) {
-              tsFileWriter.flushAllChunkGroups();
+          for (int j = 1; j <= deviceNum; j++) {
+            for (int i = 1; i <= pointNum; i++) {
+              TSRecord tsRecord = new TSRecord(i, Constant.DEVICE_PREFIX + j);
+              for (int t = 1; t <= sensorNum; t++) {
+                DataPoint dPoint1 =
+                    new LongDataPoint(Constant.SENSOR_ + t, new Random().nextLong());
+                tsRecord.addTuple(dPoint1);
+              }
+              // write TSRecord
+              tsFileWriter.write(tsRecord);
+              if (i % 100 == 0) {
+                tsFileWriter.flushAllChunkGroups();
+              }
             }
           }
           tsFileWriter.close();
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index bf5856d..712a812 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -103,6 +103,8 @@ public class TsFileSequenceReader implements AutoCloseable {
   private long minPlanIndex = Long.MAX_VALUE;
   private long maxPlanIndex = Long.MIN_VALUE;
 
+  private long startTime;
+
   /**
    * Create a file reader of the given file. The reader will read the tail of the file to get the
    * file metadata size.Then the reader will skip the first
@@ -131,14 +133,14 @@ public class TsFileSequenceReader implements AutoCloseable {
     if (FSFactoryProducer.getFSFactory().getFile(file + ".index").exists()) {
       metadataIndexInput = FSFactoryProducer.getFileInputFactory().getTsFileInput(file + ".index");
     }
-    try {
-      if (loadMetadataSize) {
-        loadMetadataSize();
-      }
-    } catch (Throwable e) {
-      tsFileInput.close();
-      throw e;
-    }
+    //    try {
+    //      if (loadMetadataSize) {
+    //        loadMetadataSize();
+    //      }
+    //    } catch (Throwable e) {
+    //      tsFileInput.close();
+    //      throw e;
+    //    }
   }
 
   // used in merge resource
@@ -195,19 +197,19 @@ public class TsFileSequenceReader implements AutoCloseable {
 
   public void loadMetadataSize() throws IOException {
     ByteBuffer metadataSize = ByteBuffer.allocate(Integer.BYTES);
-    if (readTailMagic().equals(TSFileConfig.MAGIC_STRING)) {
-      tsFileInput.read(
-          metadataSize,
-          tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length - Integer.BYTES);
-      metadataSize.flip();
-      // read file metadata size and position
-      fileMetadataSize = ReadWriteIOUtils.readInt(metadataSize);
-      fileMetadataPos =
-          tsFileInput.size()
-              - TSFileConfig.MAGIC_STRING.getBytes().length
-              - Integer.BYTES
-              - fileMetadataSize;
-    }
+    // if (readTailMagic().equals(TSFileConfig.MAGIC_STRING)) {
+    tsFileInput.read(
+        metadataSize,
+        tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length - Integer.BYTES);
+    metadataSize.flip();
+    // read file metadata size and position
+    fileMetadataSize = ReadWriteIOUtils.readInt(metadataSize);
+    fileMetadataPos =
+        tsFileInput.size()
+            - TSFileConfig.MAGIC_STRING.getBytes().length
+            - Integer.BYTES
+            - fileMetadataSize;
+    // }
   }
 
   public long getFileMetadataPos() {
@@ -265,8 +267,25 @@ public class TsFileSequenceReader implements AutoCloseable {
   public TsFileMetadata readFileMetadata() throws IOException {
     try {
       if (tsFileMetaData == null) {
+        long start = System.nanoTime();
+        ByteBuffer metadataSize = ByteBuffer.allocate(Integer.BYTES);
+        tsFileInput.read(
+            metadataSize,
+            tsFileInput.size() - TSFileConfig.MAGIC_STRING.getBytes().length - Integer.BYTES);
+        metadataSize.flip();
+
+        // read file metadata size and position
+        fileMetadataSize = ReadWriteIOUtils.readInt(metadataSize);
+
+        fileMetadataPos =
+            tsFileInput.size()
+                - TSFileConfig.MAGIC_STRING.getBytes().length
+                - Integer.BYTES
+                - fileMetadataSize;
+        resourceLogger.debug("ReadFileMetadata " + (System.nanoTime() - start) / 1000_000L + " ms");
         tsFileMetaData =
             TsFileMetadata.deserializeFrom(readData(fileMetadataPos, fileMetadataSize));
+        resourceLogger.debug("ReadFileMetadata " + (System.nanoTime() - start) / 1000_000L + " ms");
       }
     } catch (BufferOverflowException e) {
       logger.error("Something error happened while reading file metadata of file {}", file);
@@ -278,18 +297,23 @@ public class TsFileSequenceReader implements AutoCloseable {
   public TsFileMetadata readFileMetadataV2() throws IOException {
     try {
       if (tsFileMetaData == null) {
-        long totalSize = metadataIndexInput.size();
+        long start = System.nanoTime();
         ByteBuffer rootNodeOffsetBuffer = ByteBuffer.allocate(Long.BYTES);
-        metadataIndexInput.read(rootNodeOffsetBuffer, totalSize - Long.BYTES);
+        metadataIndexInput.read(rootNodeOffsetBuffer, metadataIndexInput.size() - Long.BYTES);
         rootNodeOffsetBuffer.flip();
 
         long rootNodeOffset = ReadWriteIOUtils.readLong(rootNodeOffsetBuffer);
+        resourceLogger.debug(
+            "ReadFileMetadataV2 " + (System.nanoTime() - start) / 1000_000L + " ms");
+
         tsFileMetaData =
             TsFileMetadataV2.deserializeFrom(
                 readData(
                     rootNodeOffset,
                     FSFactoryProducer.getFSFactory().getFile(this.file + ".index").length(),
                     metadataIndexInput));
+        resourceLogger.debug(
+            "ReadFileMetadataV2 " + (System.nanoTime() - start) / 1000_000L + " ms");
       }
     } catch (BufferOverflowException e) {
       logger.error("Something error happened while reading file metadata of file {}", file);
@@ -443,33 +467,12 @@ public class TsFileSequenceReader implements AutoCloseable {
     return searchResult >= 0 ? timeseriesMetadataList.get(searchResult) : null;
   }
 
-  public TimeseriesMetadata readTimeseriesMetadataV3(Path path, boolean ignoreNotExists)
-      throws IOException {
-    readFileMetadataV2();
-
-    List<TimeseriesMetadata> timeseriesMetadataList = new ArrayList<>();
-    ByteBuffer buffer = readData(position(), fileMetadataPos);
-    while (buffer.hasRemaining()) {
-      try {
-        timeseriesMetadataList.add(TimeseriesMetadata.deserializeFrom(buffer, true));
-      } catch (BufferOverflowException e) {
-        logger.error(
-            "Something error happened while deserializing TimeseriesMetadata of file {}", file);
-        throw e;
-      }
-    }
-    // return null if path does not exist in the TsFile
-    int searchResult =
-        binarySearchInTimeseriesMetadataList(timeseriesMetadataList, path.getMeasurement());
-    return searchResult >= 0 ? timeseriesMetadataList.get(searchResult) : null;
-  }
-
   public TimeseriesMetadata readTimeseriesMetadataV4(Path path, boolean ignoreNotExists)
       throws IOException {
     readFileMetadataV2();
     MetadataIndexNode deviceMetadataIndexNode = tsFileMetaData.getMetadataIndex();
     Pair<MetadataIndexEntry, Long> metadataIndexPair =
-        getMetadataAndEndOffset(deviceMetadataIndexNode, path.getDevice(), true, true);
+        getMetadataAndEndOffsetV2(deviceMetadataIndexNode, path.getDevice(), true, true);
     if (metadataIndexPair == null) {
       if (ignoreNotExists) {
         return null;
@@ -487,7 +490,7 @@ public class TsFileSequenceReader implements AutoCloseable {
         throw e;
       }
       metadataIndexPair =
-          getMetadataAndEndOffset(metadataIndexNode, path.getMeasurement(), false, false);
+          getMetadataAndEndOffsetV2(metadataIndexNode, path.getMeasurement(), false, false);
     }
     if (metadataIndexPair == null) {
       return null;
@@ -596,6 +599,7 @@ public class TsFileSequenceReader implements AutoCloseable {
 
   public List<TimeseriesMetadata> readTimeseriesMetadata(String device, Set<String> measurements)
       throws IOException {
+    long start = System.nanoTime();
     readFileMetadata();
     MetadataIndexNode deviceMetadataIndexNode = tsFileMetaData.getMetadataIndex();
     Pair<MetadataIndexEntry, Long> metadataIndexPair =
@@ -649,6 +653,8 @@ public class TsFileSequenceReader implements AutoCloseable {
           }
         }
         if (measurementsHadFound.size() == measurements.size()) {
+          resourceLogger.debug(
+              "ReadTimeseriesMetadata " + (System.nanoTime() - start) / 1000_000L + " ms");
           return resultTimeseriesMetadataList;
         }
       }
@@ -721,10 +727,12 @@ public class TsFileSequenceReader implements AutoCloseable {
 
   public List<TimeseriesMetadata> readTimeseriesMetadataV3(String device, Set<String> measurements)
       throws IOException {
+    long start = System.nanoTime();
+
     readFileMetadataV2();
     MetadataIndexNode deviceMetadataIndexNode = tsFileMetaData.getMetadataIndex();
     Pair<MetadataIndexEntry, Long> metadataIndexPair =
-        getMetadataAndEndOffset(deviceMetadataIndexNode, device, true, false);
+        getMetadataAndEndOffsetV2(deviceMetadataIndexNode, device, true, false);
     if (metadataIndexPair == null) {
       return Collections.emptyList();
     }
@@ -748,7 +756,7 @@ public class TsFileSequenceReader implements AutoCloseable {
           throw e;
         }
         measurementMetadataIndexPair =
-            getMetadataAndEndOffset(metadataIndexNode, measurementList.get(i), false, false);
+            getMetadataAndEndOffsetV2(metadataIndexNode, measurementList.get(i), false, false);
       }
       if (measurementMetadataIndexPair == null) {
         return Collections.emptyList();
@@ -775,6 +783,8 @@ public class TsFileSequenceReader implements AutoCloseable {
           }
         }
         if (measurementsHadFound.size() == measurements.size()) {
+          resourceLogger.debug(
+              "ReadTimeseriesMetadataV3 " + (System.nanoTime() - start) / 1000_000L + " ms");
           return resultTimeseriesMetadataList;
         }
       }
@@ -782,19 +792,6 @@ public class TsFileSequenceReader implements AutoCloseable {
     return resultTimeseriesMetadataList;
   }
 
-  public MetadataIndexNode readMetadataIndex() throws IOException {
-    long totalSize = metadataIndexInput.size();
-    ByteBuffer lastNodeSizeBuffer = ByteBuffer.allocate(Integer.BYTES);
-    metadataIndexInput.read(lastNodeSizeBuffer, totalSize - Integer.BYTES);
-    lastNodeSizeBuffer.flip();
-
-    int lastNodeSize = ReadWriteIOUtils.readInt(lastNodeSizeBuffer);
-    ByteBuffer lastNode = ByteBuffer.allocate(lastNodeSize);
-    metadataIndexInput.read(lastNode, totalSize - lastNodeSize - Integer.BYTES);
-    lastNode.flip();
-    return MetadataIndexNode.deserializeFrom(lastNode);
-  }
-
   protected int binarySearchInTimeseriesMetadataList(
       List<TimeseriesMetadata> timeseriesMetadataList, String key) {
     int low = 0;
@@ -915,7 +912,7 @@ public class TsFileSequenceReader implements AutoCloseable {
                 .collect(Collectors.toList()));
       } else {
         // keep traversing
-        deviceList.addAll(getAllDevices(node));
+        deviceList.addAll(getAllDevicesV2(node));
       }
     }
     return deviceList;
@@ -1127,6 +1124,31 @@ public class TsFileSequenceReader implements AutoCloseable {
     }
   }
 
+  protected Pair<MetadataIndexEntry, Long> getMetadataAndEndOffsetV2(
+      MetadataIndexNode metadataIndex, String name, boolean isDeviceLevel, boolean exactSearch)
+      throws IOException {
+    try {
+      // When searching for a device node, return when it is not INTERNAL_DEVICE
+      // When searching for a measurement node, return when it is not INTERNAL_MEASUREMENT
+      if ((isDeviceLevel
+              && !metadataIndex.getNodeType().equals(MetadataIndexNodeType.INTERNAL_DEVICE))
+          || (!isDeviceLevel
+              && !metadataIndex.getNodeType().equals(MetadataIndexNodeType.INTERNAL_MEASUREMENT))) {
+        return metadataIndex.getChildIndexEntry(name, exactSearch);
+      } else {
+        Pair<MetadataIndexEntry, Long> childIndexEntry =
+            metadataIndex.getChildIndexEntry(name, false);
+        ByteBuffer buffer =
+            readData(childIndexEntry.left.getOffset(), childIndexEntry.right, metadataIndexInput);
+        return getMetadataAndEndOffsetV2(
+            MetadataIndexNode.deserializeFrom(buffer), name, isDeviceLevel, exactSearch);
+      }
+    } catch (BufferOverflowException e) {
+      logger.error("Something error happened while deserializing MetadataIndex of file {}", file);
+      throw e;
+    }
+  }
+
   /**
    * read data from current position of the input, and deserialize it to a CHUNK_GROUP_FOOTER. <br>
    * This method is not threadsafe.
@@ -1590,15 +1612,6 @@ public class TsFileSequenceReader implements AutoCloseable {
     return chunkMetadataList;
   }
 
-  public List<ChunkMetadata> getChunkMetadataListV3(Path path, boolean ignoreNotExists)
-      throws IOException {
-    TimeseriesMetadata timeseriesMetaData = readTimeseriesMetadataV3(path, ignoreNotExists);
-
-    List<ChunkMetadata> chunkMetadataList = readChunkMetaDataList(timeseriesMetaData);
-    chunkMetadataList.sort(Comparator.comparingLong(IChunkMetadata::getStartTime));
-    return chunkMetadataList;
-  }
-
   public List<ChunkMetadata> getChunkMetadataList(Path path) throws IOException {
     return getChunkMetadataList(path, false);
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/query/executor/TsFileExecutor.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/query/executor/TsFileExecutor.java
index b3b43a2..cd547ca 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/query/executor/TsFileExecutor.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/query/executor/TsFileExecutor.java
@@ -67,8 +67,7 @@ public class TsFileExecutor implements QueryExecutor {
       queryExpression.setSelectSeries(filteredSeriesPath);
     }
 
-    // metadataQuerier.loadChunkMetaDatas(queryExpression.getSelectedSeries());
-    // metadataQuerier.loadChunkMetaDatasV2(queryExpression.getSelectedSeries());
+    //    metadataQuerier.loadChunkMetaDatas(queryExpression.getSelectedSeries());
     metadataQuerier.loadChunkMetaDatasV3(queryExpression.getSelectedSeries());
 
     if (queryExpression.hasQueryFilter()) {
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
index 4dd528d..625c38c 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
@@ -356,7 +356,7 @@ public class TsFileWriter implements AutoCloseable {
     //  }
     //
     //  public void closeV2() throws IOException {
-    LOG.info("start close file");
+    LOG.info("start close file IN NEW WAY");
     flushAllChunkGroups();
     fileWriter.endFileV3();
   }
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index bba12dc..ac24f39 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -331,86 +331,93 @@ public class TsFileIOWriter {
     }
     canWrite = false;
   }
-
-  public void endFileV2() throws IOException {
-    long metaOffset = out.getPosition();
-
-    // serialize the SEPARATOR of MetaData
-    ReadWriteIOUtils.write(MetaMarker.SEPARATOR, out.wrapAsStream());
-
-    // group ChunkMetadata by series
-    // only contains ordinary path and time column of vector series
-    Map<Path, List<IChunkMetadata>> chunkMetadataListMap = new TreeMap<>();
-
-    // time column -> ChunkMetadataList TreeMap of value columns in vector
-    Map<Path, Map<Path, List<IChunkMetadata>>> vectorToPathsMap = new HashMap<>();
-
-    for (ChunkGroupMetadata chunkGroupMetadata : chunkGroupMetadataList) {
-      List<ChunkMetadata> chunkMetadatas = chunkGroupMetadata.getChunkMetadataList();
-      int idx = 0;
-      while (idx < chunkMetadatas.size()) {
-        IChunkMetadata chunkMetadata = chunkMetadatas.get(idx);
-        if (chunkMetadata.getMask() == 0) {
-          Path series = new Path(chunkGroupMetadata.getDevice(), chunkMetadata.getMeasurementUid());
-          chunkMetadataListMap.computeIfAbsent(series, k -> new ArrayList<>()).add(chunkMetadata);
-          idx++;
-        } else if (chunkMetadata.isTimeColumn()) {
-          // time column of a vector series
-          Path series = new Path(chunkGroupMetadata.getDevice(), chunkMetadata.getMeasurementUid());
-          chunkMetadataListMap.computeIfAbsent(series, k -> new ArrayList<>()).add(chunkMetadata);
-          idx++;
-          Map<Path, List<IChunkMetadata>> chunkMetadataListMapInVector =
-              vectorToPathsMap.computeIfAbsent(series, key -> new TreeMap<>());
-
-          // value columns of a vector series
-          while (idx < chunkMetadatas.size() && chunkMetadatas.get(idx).isValueColumn()) {
-            chunkMetadata = chunkMetadatas.get(idx);
-            Path vectorSeries =
-                new Path(chunkGroupMetadata.getDevice(), chunkMetadata.getMeasurementUid());
-            chunkMetadataListMapInVector
-                .computeIfAbsent(vectorSeries, k -> new ArrayList<>())
-                .add(chunkMetadata);
-            idx++;
-          }
-        }
-      }
-    }
-
-    MetadataIndexNode metadataIndex = flushMetadataIndexV2(chunkMetadataListMap, vectorToPathsMap);
-    TsFileMetadata tsFileMetaData = new TsFileMetadata();
-    tsFileMetaData.setMetadataIndex(metadataIndex);
-    tsFileMetaData.setMetaOffset(metaOffset);
-
-    long footerIndex = out.getPosition();
-    if (logger.isDebugEnabled()) {
-      logger.debug("start to flush the footer,file pos:{}", footerIndex);
-    }
-
-    // write TsFileMetaData
-    int size = tsFileMetaData.serializeTo(out.wrapAsStream());
-    if (logger.isDebugEnabled()) {
-      logger.debug("finish flushing the footer {}, file pos:{}", tsFileMetaData, out.getPosition());
-    }
-
-    // write bloom filter
-    size += tsFileMetaData.serializeBloomFilter(out.wrapAsStream(), chunkMetadataListMap.keySet());
-    if (logger.isDebugEnabled()) {
-      logger.debug("finish flushing the bloom filter file pos:{}", out.getPosition());
-    }
-
-    // write TsFileMetaData size
-    ReadWriteIOUtils.write(size, out.wrapAsStream()); // write the size of the file metadata.
-
-    // write magic string
-    out.write(MAGIC_STRING_BYTES);
-
-    // close file
-    out.close();
-    if (resourceLogger.isDebugEnabled() && file != null) {
-      resourceLogger.debug("{} writer is closed.", file.getName());
-    }
-    canWrite = false;
-  }
+  //
+  //  public void endFileV2() throws IOException {
+  //    long metaOffset = out.getPosition();
+  //
+  //    // serialize the SEPARATOR of MetaData
+  //    ReadWriteIOUtils.write(MetaMarker.SEPARATOR, out.wrapAsStream());
+  //
+  //    // group ChunkMetadata by series
+  //    // only contains ordinary path and time column of vector series
+  //    Map<Path, List<IChunkMetadata>> chunkMetadataListMap = new TreeMap<>();
+  //
+  //    // time column -> ChunkMetadataList TreeMap of value columns in vector
+  //    Map<Path, Map<Path, List<IChunkMetadata>>> vectorToPathsMap = new HashMap<>();
+  //
+  //    for (ChunkGroupMetadata chunkGroupMetadata : chunkGroupMetadataList) {
+  //      List<ChunkMetadata> chunkMetadatas = chunkGroupMetadata.getChunkMetadataList();
+  //      int idx = 0;
+  //      while (idx < chunkMetadatas.size()) {
+  //        IChunkMetadata chunkMetadata = chunkMetadatas.get(idx);
+  //        if (chunkMetadata.getMask() == 0) {
+  //          Path series = new Path(chunkGroupMetadata.getDevice(),
+  // chunkMetadata.getMeasurementUid());
+  //          chunkMetadataListMap.computeIfAbsent(series, k -> new
+  // ArrayList<>()).add(chunkMetadata);
+  //          idx++;
+  //        } else if (chunkMetadata.isTimeColumn()) {
+  //          // time column of a vector series
+  //          Path series = new Path(chunkGroupMetadata.getDevice(),
+  // chunkMetadata.getMeasurementUid());
+  //          chunkMetadataListMap.computeIfAbsent(series, k -> new
+  // ArrayList<>()).add(chunkMetadata);
+  //          idx++;
+  //          Map<Path, List<IChunkMetadata>> chunkMetadataListMapInVector =
+  //              vectorToPathsMap.computeIfAbsent(series, key -> new TreeMap<>());
+  //
+  //          // value columns of a vector series
+  //          while (idx < chunkMetadatas.size() && chunkMetadatas.get(idx).isValueColumn()) {
+  //            chunkMetadata = chunkMetadatas.get(idx);
+  //            Path vectorSeries =
+  //                new Path(chunkGroupMetadata.getDevice(), chunkMetadata.getMeasurementUid());
+  //            chunkMetadataListMapInVector
+  //                .computeIfAbsent(vectorSeries, k -> new ArrayList<>())
+  //                .add(chunkMetadata);
+  //            idx++;
+  //          }
+  //        }
+  //      }
+  //    }
+  //
+  //    MetadataIndexNode metadataIndex = flushMetadataIndexV2(chunkMetadataListMap,
+  // vectorToPathsMap);
+  //    TsFileMetadata tsFileMetaData = new TsFileMetadata();
+  //    tsFileMetaData.setMetadataIndex(metadataIndex);
+  //    tsFileMetaData.setMetaOffset(metaOffset);
+  //
+  //    long footerIndex = out.getPosition();
+  //    if (logger.isDebugEnabled()) {
+  //      logger.debug("start to flush the footer,file pos:{}", footerIndex);
+  //    }
+  //
+  //    // write TsFileMetaData
+  //    int size = tsFileMetaData.serializeTo(out.wrapAsStream());
+  //    if (logger.isDebugEnabled()) {
+  //      logger.debug("finish flushing the footer {}, file pos:{}", tsFileMetaData,
+  // out.getPosition());
+  //    }
+  //
+  //    // write bloom filter
+  //    size += tsFileMetaData.serializeBloomFilter(out.wrapAsStream(),
+  // chunkMetadataListMap.keySet());
+  //    if (logger.isDebugEnabled()) {
+  //      logger.debug("finish flushing the bloom filter file pos:{}", out.getPosition());
+  //    }
+  //
+  //    // write TsFileMetaData size
+  //    ReadWriteIOUtils.write(size, out.wrapAsStream()); // write the size of the file metadata.
+  //
+  //    // write magic string
+  //    out.write(MAGIC_STRING_BYTES);
+  //
+  //    // close file
+  //    out.close();
+  //    if (resourceLogger.isDebugEnabled() && file != null) {
+  //      resourceLogger.debug("{} writer is closed.", file.getName());
+  //    }
+  //    canWrite = false;
+  //  }
 
   public void endFileV3() throws IOException {
     long metaOffset = out.getPosition();
@@ -497,7 +504,7 @@ public class TsFileIOWriter {
     // close file
     out.close();
     if (resourceLogger.isDebugEnabled() && file != null) {
-      resourceLogger.debug("{} writer is closed.", file.getName());
+      resourceLogger.debug("{} writer is closed IN NEW WAY.", file.getName());
     }
     canWrite = false;
   }

[iotdb] 01/03: sepersate MetadataIndexTree

Posted by su...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sunzesong pushed a commit to branch experimental/index
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit a92ee19ade6b9ed2d628ccdec86d6b74660b87ab
Author: samperson1997 <sz...@mails.tsinghua.edu.cn>
AuthorDate: Wed Nov 10 13:56:51 2021 +0800

    sepersate MetadataIndexTree
---
 .../{test1832 => test1835}/TsFileSketchTool.java   |   8 +-
 .../TsFileSketchToolV2.java}                       | 195 ++++++++++-----------
 .../apache/iotdb/tsfile/test1835/TsFileWrite.java  | 110 ++++++++++++
 .../file/metadata/MetadataIndexConstructor.java    |  42 +++--
 .../tsfile/file/metadata/TsFileMetadataV2.java     | 126 +++++++++++++
 .../fileSystem/fsFactory/LocalFSFactory.java       |  20 +--
 .../iotdb/tsfile/read/TsFileSequenceReader.java    |  40 +++++
 .../apache/iotdb/tsfile/write/TsFileWriter.java    |   2 +-
 .../iotdb/tsfile/write/writer/TsFileIOWriter.java  |  97 +++++++++-
 .../iotdb/tsfile/utils/FilePathUtilsTest.java      |   4 +-
 10 files changed, 500 insertions(+), 144 deletions(-)

diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1832/TsFileSketchTool.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchTool.java
similarity index 98%
copy from example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1832/TsFileSketchTool.java
copy to example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchTool.java
index ead2726..71a6aa5 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1832/TsFileSketchTool.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchTool.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.apache.iotdb.tsfile.test1832;
+package org.apache.iotdb.tsfile.test1835;
 
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.file.MetaMarker;
@@ -56,10 +56,8 @@ public class TsFileSketchTool {
   private String splitStr; // for split different part of TsFile
 
   public static void main(String[] args) throws IOException {
-    Pair<String, String> fileNames = checkArgs(args);
-    String filename =
-        "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/0/1832/test5.tsfile";
-    String outFile = "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/0/1832/1.txt";
+    String filename = "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/1/test1.tsfile";
+    String outFile = "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/1/test1.txt";
     new TsFileSketchTool(filename, outFile).run();
   }
 
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1832/TsFileSketchTool.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java
similarity index 77%
rename from example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1832/TsFileSketchTool.java
rename to example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java
index ead2726..869435f 100644
--- a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1832/TsFileSketchTool.java
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileSketchToolV2.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.apache.iotdb.tsfile.test1832;
+package org.apache.iotdb.tsfile.test1835;
 
 import org.apache.iotdb.tsfile.common.conf.TSFileConfig;
 import org.apache.iotdb.tsfile.file.MetaMarker;
@@ -29,51 +29,58 @@ import org.apache.iotdb.tsfile.file.metadata.IChunkMetadata;
 import org.apache.iotdb.tsfile.file.metadata.MetadataIndexEntry;
 import org.apache.iotdb.tsfile.file.metadata.MetadataIndexNode;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
-import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
+import org.apache.iotdb.tsfile.file.metadata.TsFileMetadataV2;
 import org.apache.iotdb.tsfile.file.metadata.enums.MetadataIndexNodeType;
 import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
 import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
 import org.apache.iotdb.tsfile.read.common.Chunk;
 import org.apache.iotdb.tsfile.read.common.Path;
+import org.apache.iotdb.tsfile.read.reader.TsFileInput;
 import org.apache.iotdb.tsfile.utils.BloomFilter;
 import org.apache.iotdb.tsfile.utils.Pair;
+import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
 
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.PrintWriter;
-import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
-public class TsFileSketchTool {
+public class TsFileSketchToolV2 {
 
   private String filename;
+  private String indexFileName;
   private PrintWriter pw;
   private TsFileSketchToolReader reader;
+  private TsFileSketchToolReader indexReader;
   private String splitStr; // for split different part of TsFile
 
   public static void main(String[] args) throws IOException {
-    Pair<String, String> fileNames = checkArgs(args);
-    String filename =
-        "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/0/1832/test5.tsfile";
-    String outFile = "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/0/1832/1.txt";
-    new TsFileSketchTool(filename, outFile).run();
+    String filename = "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/1/test0.tsfile";
+    String outFile = "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/1/test0.txt";
+    String indexFileName =
+        "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/1/test0.tsfile.index";
+
+    new TsFileSketchToolV2(filename, indexFileName, outFile).run();
   }
 
   /**
    * construct TsFileSketchTool
    *
    * @param filename input file path
+   * @param indexFileName index file path
    * @param outFile output file path
    */
-  public TsFileSketchTool(String filename, String outFile) {
+  public TsFileSketchToolV2(String filename, String indexFileName, String outFile) {
     try {
       this.filename = filename;
+      this.indexFileName = indexFileName;
       pw = new PrintWriter(new FileWriter(outFile));
       reader = new TsFileSketchToolReader(filename);
+      indexReader = new TsFileSketchToolReader(indexFileName);
       StringBuilder str1 = new StringBuilder();
       for (int i = 0; i < 21; i++) {
         str1.append("|");
@@ -93,7 +100,7 @@ public class TsFileSketchTool {
     printlnBoth(pw, "file length: " + length);
 
     // get metadata information
-    TsFileMetadata tsFileMetaData = reader.readFileMetadata();
+    TsFileMetadataV2 tsFileMetaData = reader.readFileMetadataV2();
     List<ChunkGroupMetadata> allChunkGroupMetadata = new ArrayList<>();
     reader.selfCheck(null, allChunkGroupMetadata, false);
 
@@ -104,27 +111,17 @@ public class TsFileSketchTool {
     printChunk(allChunkGroupMetadata);
 
     // metadata begins
-    if (tsFileMetaData.getMetadataIndex().getChildren().isEmpty()) {
-      printlnBoth(pw, String.format("%20s", reader.getFileMetadataPos() - 1) + "|\t[marker] 2");
-    } else {
-      printlnBoth(
-          pw, String.format("%20s", reader.readFileMetadata().getMetaOffset()) + "|\t[marker] 2");
-    }
+    printlnBoth(pw, String.format("%20s", tsFileMetaData.getMetaOffset()) + "|\t[marker] 2");
+
+    //    System.out.println(reader.getFileMetadataPos());
+
     // get all timeseries index
     Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap =
-        reader.getAllTimeseriesMetadataWithOffset();
+        reader.getAllTimeseriesMetadataWithOffset(reader.position(), reader.getFileMetadataPos());
 
     // print timeseries index
     printTimeseriesIndex(timeseriesMetadataMap);
 
-    MetadataIndexNode metadataIndexNode = tsFileMetaData.getMetadataIndex();
-    TreeMap<Long, MetadataIndexNode> metadataIndexNodeMap = new TreeMap<>();
-    List<String> treeOutputStringBuffer = new ArrayList<>();
-    loadIndexTree(metadataIndexNode, metadataIndexNodeMap, treeOutputStringBuffer, 0);
-
-    // print IndexOfTimerseriesIndex
-    printIndexOfTimerseriesIndex(metadataIndexNodeMap);
-
     // print TsFile Metadata
     printTsFileMetadata(tsFileMetaData);
 
@@ -133,6 +130,14 @@ public class TsFileSketchTool {
         pw,
         "---------------------------- IndexOfTimerseriesIndex Tree -----------------------------");
     // print index tree
+    MetadataIndexNode metadataIndexNode = readMetadataIndex();
+    TreeMap<Long, MetadataIndexNode> metadataIndexNodeMap = new TreeMap<>();
+    List<String> treeOutputStringBuffer = new ArrayList<>();
+    loadIndexTree(metadataIndexNode, metadataIndexNodeMap, treeOutputStringBuffer, 0);
+
+    // print IndexOfTimerseriesIndex
+    printIndexOfTimerseriesIndex(metadataIndexNodeMap);
+
     for (String str : treeOutputStringBuffer) {
       printlnBoth(pw, str);
     }
@@ -142,22 +147,11 @@ public class TsFileSketchTool {
     pw.close();
   }
 
-  private void printTsFileMetadata(TsFileMetadata tsFileMetaData) {
+  private void printTsFileMetadata(TsFileMetadataV2 tsFileMetaData) {
     try {
       printlnBoth(pw, String.format("%20s", reader.getFileMetadataPos()) + "|\t[TsFileMetadata]");
       printlnBoth(
           pw, String.format("%20s", "") + "|\t\t[meta offset] " + tsFileMetaData.getMetaOffset());
-      printlnBoth(
-          pw,
-          String.format("%20s", "")
-              + "|\t\t[num of devices] "
-              + tsFileMetaData.getMetadataIndex().getChildren().size());
-      printlnBoth(
-          pw,
-          String.format("%20s", "")
-              + "|\t\t"
-              + tsFileMetaData.getMetadataIndex().getChildren().size()
-              + " key&TsMetadataIndex");
       // bloom filter
       BloomFilter bloomFilter = tsFileMetaData.getBloomFilter();
       printlnBoth(
@@ -339,7 +333,8 @@ public class TsFileSketchTool {
                 + entry.getValue().left
                 + ", tsDataType:"
                 + entry.getValue().right.getTSDataType());
-        for (IChunkMetadata chunkMetadata : reader.getChunkMetadataList(entry.getValue().left)) {
+        for (IChunkMetadata chunkMetadata :
+            reader.getChunkMetadataListV3(entry.getValue().left, false)) {
           printlnBoth(
               pw,
               String.format("%20s", "")
@@ -394,7 +389,7 @@ public class TsFileSketchTool {
           endOffset = metadataIndexNode.getChildren().get(i + 1).getOffset();
         }
         MetadataIndexNode subNode =
-            reader.getMetadataIndexNode(metadataIndexEntry.getOffset(), endOffset);
+            indexReader.getMetadataIndexNode(metadataIndexEntry.getOffset(), endOffset);
         metadataIndexNodeMap.put(metadataIndexEntry.getOffset(), subNode);
         loadIndexTree(subNode, metadataIndexNodeMap, treeOutputStringBuffer, deep + 1);
       }
@@ -406,16 +401,18 @@ public class TsFileSketchTool {
     pw.println(str);
   }
 
-  private static Pair<String, String> checkArgs(String[] args) {
-    String filename = "test.tsfile";
-    String outFile = "TsFile_sketch_view.txt";
-    if (args.length == 1) {
-      filename = args[0];
-    } else if (args.length == 2) {
-      filename = args[0];
-      outFile = args[1];
-    }
-    return new Pair<>(filename, outFile);
+  private MetadataIndexNode readMetadataIndex() throws IOException {
+    TsFileInput tsFileInput = FSFactoryProducer.getFileInputFactory().getTsFileInput(indexFileName);
+    long totalSize = tsFileInput.size();
+    ByteBuffer lastNodeSizeBuffer = ByteBuffer.allocate(Integer.BYTES);
+    tsFileInput.read(lastNodeSizeBuffer, totalSize - Integer.BYTES);
+    lastNodeSizeBuffer.flip();
+
+    int lastNodeSize = ReadWriteIOUtils.readInt(lastNodeSizeBuffer);
+    ByteBuffer lastNode = ByteBuffer.allocate(lastNodeSize);
+    tsFileInput.read(lastNode, totalSize - lastNodeSize - Integer.BYTES);
+    lastNode.flip();
+    return MetadataIndexNode.deserializeFrom(lastNode);
   }
 
   private class TsFileSketchToolReader extends TsFileSequenceReader {
@@ -440,69 +437,53 @@ public class TsFileSketchTool {
         Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap,
         boolean needChunkMetadata)
         throws IOException {
-      try {
-        if (type.equals(MetadataIndexNodeType.LEAF_MEASUREMENT)) {
-          while (buffer.hasRemaining()) {
-            long pos = startOffset + buffer.position();
-            TimeseriesMetadata timeseriesMetadata =
-                TimeseriesMetadata.deserializeFrom(buffer, needChunkMetadata);
-            timeseriesMetadataMap.put(
-                pos,
-                new Pair<>(
-                    new Path(deviceId, timeseriesMetadata.getMeasurementId()), timeseriesMetadata));
-          }
-        } else {
-          // deviceId should be determined by LEAF_DEVICE node
-          if (type.equals(MetadataIndexNodeType.LEAF_DEVICE)) {
-            deviceId = metadataIndex.getName();
-          }
-          MetadataIndexNode metadataIndexNode = MetadataIndexNode.deserializeFrom(buffer);
-          int metadataIndexListSize = metadataIndexNode.getChildren().size();
-          for (int i = 0; i < metadataIndexListSize; i++) {
-            long endOffset = metadataIndexNode.getEndOffset();
-            if (i != metadataIndexListSize - 1) {
-              endOffset = metadataIndexNode.getChildren().get(i + 1).getOffset();
-            }
-            ByteBuffer nextBuffer =
-                readData(metadataIndexNode.getChildren().get(i).getOffset(), endOffset);
-            generateMetadataIndexWithOffset(
-                metadataIndexNode.getChildren().get(i).getOffset(),
-                metadataIndexNode.getChildren().get(i),
-                nextBuffer,
-                deviceId,
-                metadataIndexNode.getNodeType(),
-                timeseriesMetadataMap,
-                needChunkMetadata);
+      if (type.equals(MetadataIndexNodeType.LEAF_MEASUREMENT)) {
+        while (buffer.hasRemaining()) {
+          long pos = startOffset + buffer.position();
+          TimeseriesMetadata timeseriesMetadata =
+              TimeseriesMetadata.deserializeFrom(buffer, needChunkMetadata);
+          timeseriesMetadataMap.put(
+              pos,
+              new Pair<>(
+                  new Path(deviceId, timeseriesMetadata.getMeasurementId()), timeseriesMetadata));
+        }
+      } else {
+        // deviceId should be determined by LEAF_DEVICE node
+        if (type.equals(MetadataIndexNodeType.LEAF_DEVICE)) {
+          deviceId = metadataIndex.getName();
+        }
+        MetadataIndexNode metadataIndexNode = MetadataIndexNode.deserializeFrom(buffer);
+        int metadataIndexListSize = metadataIndexNode.getChildren().size();
+        for (int i = 0; i < metadataIndexListSize; i++) {
+          long endOffset = metadataIndexNode.getEndOffset();
+          if (i != metadataIndexListSize - 1) {
+            endOffset = metadataIndexNode.getChildren().get(i + 1).getOffset();
           }
+          ByteBuffer nextBuffer =
+              readData(metadataIndexNode.getChildren().get(i).getOffset(), endOffset);
+          generateMetadataIndexWithOffset(
+              metadataIndexNode.getChildren().get(i).getOffset(),
+              metadataIndexNode.getChildren().get(i),
+              nextBuffer,
+              deviceId,
+              metadataIndexNode.getNodeType(),
+              timeseriesMetadataMap,
+              needChunkMetadata);
         }
-      } catch (BufferOverflowException e) {
-        throw e;
       }
     }
 
-    public Map<Long, Pair<Path, TimeseriesMetadata>> getAllTimeseriesMetadataWithOffset()
-        throws IOException {
-      if (tsFileMetaData == null) {
-        readFileMetadata();
-      }
-      MetadataIndexNode metadataIndexNode = tsFileMetaData.getMetadataIndex();
+    public Map<Long, Pair<Path, TimeseriesMetadata>> getAllTimeseriesMetadataWithOffset(
+        long startOffset, long endOffset) throws IOException {
       Map<Long, Pair<Path, TimeseriesMetadata>> timeseriesMetadataMap = new TreeMap<>();
-      List<MetadataIndexEntry> metadataIndexEntryList = metadataIndexNode.getChildren();
-      for (int i = 0; i < metadataIndexEntryList.size(); i++) {
-        MetadataIndexEntry metadataIndexEntry = metadataIndexEntryList.get(i);
-        long endOffset = tsFileMetaData.getMetadataIndex().getEndOffset();
-        if (i != metadataIndexEntryList.size() - 1) {
-          endOffset = metadataIndexEntryList.get(i + 1).getOffset();
-        }
-        ByteBuffer buffer = readData(metadataIndexEntry.getOffset(), endOffset);
-        generateMetadataIndexWithOffset(
-            metadataIndexEntry.getOffset(),
-            metadataIndexEntry,
-            buffer,
-            null,
-            metadataIndexNode.getNodeType(),
-            timeseriesMetadataMap,
-            false);
+
+      ByteBuffer buffer = readData(startOffset, endOffset);
+      while (buffer.hasRemaining()) {
+        int bufferPos = buffer.position();
+        TimeseriesMetadata timeseriesMetaData = TimeseriesMetadata.deserializeFrom(buffer, false);
+        timeseriesMetadataMap.put(
+            reader.position() + bufferPos,
+            new Pair<>(new Path("d1", timeseriesMetaData.getMeasurementId()), timeseriesMetaData));
       }
       return timeseriesMetadataMap;
     }
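
For orientation, a minimal sketch of driving the two-file sketch tool above; the paths are
placeholders rather than the hard-coded ones in the main method, and the calling method is
assumed to declare throws IOException:

    String dataFile  = "/path/to/test0.tsfile";           // placeholder data file
    String indexFile = dataFile + ".index";                // companion index file written by endFileV3
    String sketchOut = "/path/to/test0.txt";               // where the textual sketch is printed
    new TsFileSketchToolV2(dataFile, indexFile, sketchOut).run();
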
diff --git a/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java
new file mode 100644
index 0000000..aa087b4
--- /dev/null
+++ b/example/tsfile/src/main/java/org/apache/iotdb/tsfile/test1835/TsFileWrite.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.tsfile.test1835;
+
+import org.apache.iotdb.tsfile.Constant;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
+import org.apache.iotdb.tsfile.fileSystem.FSFactoryProducer;
+import org.apache.iotdb.tsfile.read.common.Path;
+import org.apache.iotdb.tsfile.write.TsFileWriter;
+import org.apache.iotdb.tsfile.write.record.TSRecord;
+import org.apache.iotdb.tsfile.write.record.datapoint.DataPoint;
+import org.apache.iotdb.tsfile.write.record.datapoint.LongDataPoint;
+import org.apache.iotdb.tsfile.write.schema.UnaryMeasurementSchema;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Options;
+
+import java.io.File;
+import java.util.Random;
+
+/**
+ * An example of writing data with TSRecord to TsFile It uses the interface: public void
+ * addMeasurement(MeasurementSchema measurementSchema) throws WriteProcessException
+ */
+public class TsFileWrite {
+  public static int deviceNum = 1;
+  public static int sensorNum = 1;
+  public static int fileNum = 1;
+
+  public static void main(String[] args) {
+    Options opts = new Options();
+    //    Option chunkNumOption =
+    //        OptionBuilder.withArgName("args").withLongOpt("chunkNum").hasArg().create("c");
+    //    opts.addOption(chunkNumOption);
+
+    BasicParser parser = new BasicParser();
+    CommandLine cl;
+    try {
+      cl = parser.parse(opts, args);
+      //      chunkNum = Integer.parseInt(cl.getOptionValue("c"));
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    for (int fileIndex = 0; fileIndex < fileNum; fileIndex++) {
+      try {
+        String path =
+            "/Users/samperson1997/git/iotdb/data/data/sequence/root.sg/1/"
+                + deviceNum
+                + "/test"
+                + 0
+                + ".tsfile";
+        File f = FSFactoryProducer.getFSFactory().getFile(path);
+        if (f.exists()) {
+          f.delete();
+        }
+
+        try {
+          TsFileWriter tsFileWriter = new TsFileWriter(f);
+          // 1000 timeseries
+          for (int i = 1; i <= 1000; i++) {
+            tsFileWriter.registerTimeseries(
+                new Path(Constant.DEVICE_PREFIX, Constant.SENSOR_ + i),
+                new UnaryMeasurementSchema(Constant.SENSOR_ + i, TSDataType.INT64, TSEncoding.RLE));
+          }
+          // construct TSRecord
+          for (int i = 1; i <= 100; i++) {
+            TSRecord tsRecord = new TSRecord(i, Constant.DEVICE_PREFIX);
+            for (int t = 1; t <= 1000; t++) {
+              DataPoint dPoint1 = new LongDataPoint(Constant.SENSOR_ + t, new Random().nextLong());
+              tsRecord.addTuple(dPoint1);
+            }
+            // write TSRecord
+            tsFileWriter.write(tsRecord);
+            if (i % 100 == 0) {
+              tsFileWriter.flushAllChunkGroups();
+            }
+          }
+          tsFileWriter.close();
+        } catch (Throwable e) {
+          e.printStackTrace();
+          System.out.println(e.getMessage());
+        }
+
+      } catch (Throwable e) {
+        e.printStackTrace();
+        System.out.println(e.getMessage());
+      }
+    }
+  }
+}
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexConstructor.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexConstructor.java
index de20f40..f8926d4 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexConstructor.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/MetadataIndexConstructor.java
@@ -44,11 +44,14 @@ public class MetadataIndexConstructor {
    * Construct metadata index tree
    *
    * @param deviceTimeseriesMetadataMap device => TimeseriesMetadata list
-   * @param out tsfile output
+   * @param tsFileOutput tsfile output
+   * @param metadataIndexOutput metadataIndex output
    */
   @SuppressWarnings("squid:S3776") // Suppress high Cognitive Complexity warning
   public static MetadataIndexNode constructMetadataIndex(
-      Map<String, List<TimeseriesMetadata>> deviceTimeseriesMetadataMap, TsFileOutput out)
+      Map<String, List<TimeseriesMetadata>> deviceTimeseriesMetadataMap,
+      TsFileOutput tsFileOutput,
+      TsFileOutput metadataIndexOutput)
       throws IOException {
 
     Map<String, MetadataIndexNode> deviceMetadataIndexMap = new TreeMap<>();
@@ -68,21 +71,25 @@ public class MetadataIndexConstructor {
         if (serializedTimeseriesMetadataNum == 0
             || serializedTimeseriesMetadataNum >= config.getMaxDegreeOfIndexNode()) {
           if (currentIndexNode.isFull()) {
-            addCurrentIndexNodeToQueue(currentIndexNode, measurementMetadataIndexQueue, out);
+            addCurrentIndexNodeToQueue(
+                currentIndexNode, measurementMetadataIndexQueue, tsFileOutput);
             currentIndexNode = new MetadataIndexNode(MetadataIndexNodeType.LEAF_MEASUREMENT);
           }
           currentIndexNode.addEntry(
-              new MetadataIndexEntry(timeseriesMetadata.getMeasurementId(), out.getPosition()));
+              new MetadataIndexEntry(
+                  timeseriesMetadata.getMeasurementId(), tsFileOutput.getPosition()));
           serializedTimeseriesMetadataNum = 0;
         }
-        timeseriesMetadata.serializeTo(out.wrapAsStream());
+        timeseriesMetadata.serializeTo(tsFileOutput.wrapAsStream());
         serializedTimeseriesMetadataNum++;
       }
-      addCurrentIndexNodeToQueue(currentIndexNode, measurementMetadataIndexQueue, out);
+      addCurrentIndexNodeToQueue(currentIndexNode, measurementMetadataIndexQueue, tsFileOutput);
       deviceMetadataIndexMap.put(
           entry.getKey(),
           generateRootNode(
-              measurementMetadataIndexQueue, out, MetadataIndexNodeType.INTERNAL_MEASUREMENT));
+              measurementMetadataIndexQueue,
+              metadataIndexOutput,
+              MetadataIndexNodeType.INTERNAL_MEASUREMENT));
     }
 
     // if not exceed the max child nodes num, ignore the device index and directly point to the
@@ -91,10 +98,11 @@ public class MetadataIndexConstructor {
       MetadataIndexNode metadataIndexNode =
           new MetadataIndexNode(MetadataIndexNodeType.LEAF_DEVICE);
       for (Map.Entry<String, MetadataIndexNode> entry : deviceMetadataIndexMap.entrySet()) {
-        metadataIndexNode.addEntry(new MetadataIndexEntry(entry.getKey(), out.getPosition()));
-        entry.getValue().serializeTo(out.wrapAsStream());
+        metadataIndexNode.addEntry(
+            new MetadataIndexEntry(entry.getKey(), metadataIndexOutput.getPosition()));
+        entry.getValue().serializeTo(metadataIndexOutput.wrapAsStream());
       }
-      metadataIndexNode.setEndOffset(out.getPosition());
+      metadataIndexNode.setEndOffset(metadataIndexOutput.getPosition());
       return metadataIndexNode;
     }
 
@@ -105,16 +113,18 @@ public class MetadataIndexConstructor {
     for (Map.Entry<String, MetadataIndexNode> entry : deviceMetadataIndexMap.entrySet()) {
       // when constructing from internal node, each node is related to an entry
       if (currentIndexNode.isFull()) {
-        addCurrentIndexNodeToQueue(currentIndexNode, deviceMetadataIndexQueue, out);
+        addCurrentIndexNodeToQueue(currentIndexNode, deviceMetadataIndexQueue, metadataIndexOutput);
         currentIndexNode = new MetadataIndexNode(MetadataIndexNodeType.LEAF_DEVICE);
       }
-      currentIndexNode.addEntry(new MetadataIndexEntry(entry.getKey(), out.getPosition()));
-      entry.getValue().serializeTo(out.wrapAsStream());
+      currentIndexNode.addEntry(
+          new MetadataIndexEntry(entry.getKey(), metadataIndexOutput.getPosition()));
+      entry.getValue().serializeTo(metadataIndexOutput.wrapAsStream());
     }
-    addCurrentIndexNodeToQueue(currentIndexNode, deviceMetadataIndexQueue, out);
+    addCurrentIndexNodeToQueue(currentIndexNode, deviceMetadataIndexQueue, metadataIndexOutput);
     MetadataIndexNode deviceMetadataIndexNode =
-        generateRootNode(deviceMetadataIndexQueue, out, MetadataIndexNodeType.INTERNAL_DEVICE);
-    deviceMetadataIndexNode.setEndOffset(out.getPosition());
+        generateRootNode(
+            deviceMetadataIndexQueue, metadataIndexOutput, MetadataIndexNodeType.INTERNAL_DEVICE);
+    deviceMetadataIndexNode.setEndOffset(metadataIndexOutput.getPosition());
     return deviceMetadataIndexNode;
   }
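
The reworked constructMetadataIndex takes two outputs: TimeseriesMetadata blocks are still
serialized into the data-file output, while every MetadataIndexNode is serialized into the
index output. A minimal sketch of the call, mirroring what endFileV3 in TsFileIOWriter does;
deviceTimeseriesMetadataMap is assumed to be an already populated
Map<String, List<TimeseriesMetadata>> and the file names are placeholders:

    TsFileOutput dataOut  = new LocalTsFileOutput(new FileOutputStream("demo.tsfile", true));
    TsFileOutput indexOut = new LocalTsFileOutput(new FileOutputStream("demo.tsfile.index"));
    MetadataIndexNode root =
        MetadataIndexConstructor.constructMetadataIndex(deviceTimeseriesMetadataMap, dataOut, indexOut);
    int rootSize = root.serializeTo(indexOut.wrapAsStream());   // root node goes to the index file
    ReadWriteIOUtils.write(rootSize, indexOut.wrapAsStream());  // trailing int records the root node size
    indexOut.close();
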
 
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadataV2.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadataV2.java
new file mode 100644
index 0000000..64246b7
--- /dev/null
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/file/metadata/TsFileMetadataV2.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.tsfile.file.metadata;
+
+import org.apache.iotdb.tsfile.common.conf.TSFileDescriptor;
+import org.apache.iotdb.tsfile.read.common.Path;
+import org.apache.iotdb.tsfile.utils.BloomFilter;
+import org.apache.iotdb.tsfile.utils.ReadWriteForEncodingUtils;
+import org.apache.iotdb.tsfile.utils.ReadWriteIOUtils;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.Set;
+
+/** TSFileMetaData collects all metadata info and saves in its data structure. */
+public class TsFileMetadataV2 {
+
+  // bloom filter
+  private BloomFilter bloomFilter;
+
+  // offset of MetaMarker.SEPARATOR
+  private long metaOffset;
+
+  /**
+   * deserialize data from the buffer.
+   *
+   * @param buffer -buffer use to deserialize
+   * @return -a instance of TsFileMetaData
+   */
+  public static TsFileMetadataV2 deserializeFrom(ByteBuffer buffer) {
+    TsFileMetadataV2 fileMetaData = new TsFileMetadataV2();
+
+    // metaOffset
+    long metaOffset = ReadWriteIOUtils.readLong(buffer);
+    fileMetaData.setMetaOffset(metaOffset);
+
+    // read bloom filter
+    if (buffer.hasRemaining()) {
+      byte[] bytes = ReadWriteIOUtils.readByteBufferWithSelfDescriptionLength(buffer);
+      int filterSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
+      int hashFunctionSize = ReadWriteForEncodingUtils.readUnsignedVarInt(buffer);
+      fileMetaData.bloomFilter = BloomFilter.buildBloomFilter(bytes, filterSize, hashFunctionSize);
+    }
+
+    return fileMetaData;
+  }
+
+  public BloomFilter getBloomFilter() {
+    return bloomFilter;
+  }
+
+  public void setBloomFilter(BloomFilter bloomFilter) {
+    this.bloomFilter = bloomFilter;
+  }
+
+  /**
+   * use the given outputStream to serialize.
+   *
+   * @param outputStream -output stream to determine byte length
+   * @return -byte length
+   */
+  public int serializeTo(OutputStream outputStream) throws IOException {
+    return ReadWriteIOUtils.write(metaOffset, outputStream);
+  }
+
+  /**
+   * use the given outputStream to serialize bloom filter.
+   *
+   * @param outputStream -output stream to determine byte length
+   * @return -byte length
+   */
+  public int serializeBloomFilter(OutputStream outputStream, Set<Path> paths) throws IOException {
+    int byteLen = 0;
+    BloomFilter filter = buildBloomFilter(paths);
+
+    byte[] bytes = filter.serialize();
+    byteLen += ReadWriteForEncodingUtils.writeUnsignedVarInt(bytes.length, outputStream);
+    outputStream.write(bytes);
+    byteLen += bytes.length;
+    byteLen += ReadWriteForEncodingUtils.writeUnsignedVarInt(filter.getSize(), outputStream);
+    byteLen +=
+        ReadWriteForEncodingUtils.writeUnsignedVarInt(filter.getHashFunctionSize(), outputStream);
+    return byteLen;
+  }
+
+  /**
+   * build bloom filter
+   *
+   * @return bloom filter
+   */
+  private BloomFilter buildBloomFilter(Set<Path> paths) {
+    BloomFilter filter =
+        BloomFilter.getEmptyBloomFilter(
+            TSFileDescriptor.getInstance().getConfig().getBloomFilterErrorRate(), paths.size());
+    for (Path path : paths) {
+      filter.add(path.toString());
+    }
+    return filter;
+  }
+
+  public long getMetaOffset() {
+    return metaOffset;
+  }
+
+  public void setMetaOffset(long metaOffset) {
+    this.metaOffset = metaOffset;
+  }
+}
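
Since TsFileMetadataV2 carries only the metaOffset and the bloom filter, the footer round trip
is small. A minimal sketch (uses java.io.ByteArrayOutputStream and java.nio.ByteBuffer;
metaOffsetValue and the Set<Path> named paths are assumed placeholders; exception handling
omitted):

    ByteArrayOutputStream footer = new ByteArrayOutputStream();
    TsFileMetadataV2 meta = new TsFileMetadataV2();
    meta.setMetaOffset(metaOffsetValue);                   // position of MetaMarker.SEPARATOR, assumed known
    int size = meta.serializeTo(footer);                   // writes the 8-byte metaOffset
    size += meta.serializeBloomFilter(footer, paths);      // bloom filter built over the series paths
    TsFileMetadataV2 readBack =
        TsFileMetadataV2.deserializeFrom(ByteBuffer.wrap(footer.toByteArray()));
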
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/fileSystem/fsFactory/LocalFSFactory.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/fileSystem/fsFactory/LocalFSFactory.java
index 365ded1..8ea24c5 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/fileSystem/fsFactory/LocalFSFactory.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/fileSystem/fsFactory/LocalFSFactory.java
@@ -19,7 +19,7 @@
 
 package org.apache.iotdb.tsfile.fileSystem.fsFactory;
 
-import org.apache.commons.io.FileUtils;
+// import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -112,15 +112,15 @@ public class LocalFSFactory implements FSFactory {
 
   @Override
   public void moveFile(File srcFile, File destFile) {
-    try {
-      FileUtils.moveFile(srcFile, destFile);
-    } catch (IOException e) {
-      logger.error(
-          "Failed to move file from {} to {}. ",
-          srcFile.getAbsolutePath(),
-          destFile.getAbsolutePath(),
-          e);
-    }
+    //    try {
+    //      FileUtils.moveFile(srcFile, destFile);
+    //    } catch (IOException e) {
+    //      logger.error(
+    //          "Failed to move file from {} to {}. ",
+    //          srcFile.getAbsolutePath(),
+    //          destFile.getAbsolutePath(),
+    //          e);
+    //    }
   }
 
   @Override
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index 9a24644..565d677 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -35,6 +35,7 @@ import org.apache.iotdb.tsfile.file.metadata.MetadataIndexNode;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadataV2;
 import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
+import org.apache.iotdb.tsfile.file.metadata.TsFileMetadataV2;
 import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
 import org.apache.iotdb.tsfile.file.metadata.enums.MetadataIndexNodeType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
@@ -270,6 +271,15 @@ public class TsFileSequenceReader implements AutoCloseable {
     return tsFileMetaData;
   }
 
+  public TsFileMetadataV2 readFileMetadataV2() throws IOException {
+    try {
+      return TsFileMetadataV2.deserializeFrom(readData(fileMetadataPos, fileMetadataSize));
+    } catch (BufferOverflowException e) {
+      logger.error("Something error happened while reading file metadata of file {}", file);
+      throw e;
+    }
+  }
+
   /**
    * this function does not modify the position of the file reader.
    *
@@ -415,6 +425,27 @@ public class TsFileSequenceReader implements AutoCloseable {
     return searchResult >= 0 ? timeseriesMetadataList.get(searchResult) : null;
   }
 
+  public TimeseriesMetadata readTimeseriesMetadataV3(Path path, boolean ignoreNotExists)
+      throws IOException {
+    readFileMetadataV2();
+
+    List<TimeseriesMetadata> timeseriesMetadataList = new ArrayList<>();
+    ByteBuffer buffer = readData(position(), fileMetadataPos);
+    while (buffer.hasRemaining()) {
+      try {
+        timeseriesMetadataList.add(TimeseriesMetadata.deserializeFrom(buffer, true));
+      } catch (BufferOverflowException e) {
+        logger.error(
+            "Something error happened while deserializing TimeseriesMetadata of file {}", file);
+        throw e;
+      }
+    }
+    // return null if path does not exist in the TsFile
+    int searchResult =
+        binarySearchInTimeseriesMetadataList(timeseriesMetadataList, path.getMeasurement());
+    return searchResult >= 0 ? timeseriesMetadataList.get(searchResult) : null;
+  }
+
   /**
    * Find the leaf node that contains this vector, return all the needed subSensor and time column
    *
@@ -1352,6 +1383,15 @@ public class TsFileSequenceReader implements AutoCloseable {
     return chunkMetadataList;
   }
 
+  public List<ChunkMetadata> getChunkMetadataListV3(Path path, boolean ignoreNotExists)
+      throws IOException {
+    TimeseriesMetadata timeseriesMetaData = readTimeseriesMetadataV3(path, ignoreNotExists);
+
+    List<ChunkMetadata> chunkMetadataList = readChunkMetaDataList(timeseriesMetaData);
+    chunkMetadataList.sort(Comparator.comparingLong(IChunkMetadata::getStartTime));
+    return chunkMetadataList;
+  }
+
   public List<ChunkMetadata> getChunkMetadataList(Path path) throws IOException {
     return getChunkMetadataList(path, false);
   }
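
A minimal sketch of the new V3 read path: readTimeseriesMetadataV3 deserializes the
TimeseriesMetadata region from the reader's current position up to the footer and then
binary-searches it, instead of walking an index tree inside the data file. The file path and
series name are placeholders and exception handling is omitted:

    try (TsFileSequenceReader reader = new TsFileSequenceReader("/path/to/test0.tsfile")) {
      TsFileMetadataV2 meta = reader.readFileMetadataV2();          // metaOffset + bloom filter only
      List<ChunkMetadata> chunks =
          reader.getChunkMetadataListV3(new Path("root.sg.d1", "s1"), false);
      // chunks come back sorted by start time, as in getChunkMetadataList
    }
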
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
index 3ae214e..4dd528d 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/TsFileWriter.java
@@ -358,7 +358,7 @@ public class TsFileWriter implements AutoCloseable {
     //  public void closeV2() throws IOException {
     LOG.info("start close file");
     flushAllChunkGroups();
-    fileWriter.endFileV2();
+    fileWriter.endFileV3();
   }
 
   /**
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
index 17036ff..3c0185c 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/write/writer/TsFileIOWriter.java
@@ -32,6 +32,7 @@ import org.apache.iotdb.tsfile.file.metadata.MetadataIndexNode;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadata;
 import org.apache.iotdb.tsfile.file.metadata.TimeseriesMetadataV2;
 import org.apache.iotdb.tsfile.file.metadata.TsFileMetadata;
+import org.apache.iotdb.tsfile.file.metadata.TsFileMetadataV2;
 import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
 import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
@@ -47,6 +48,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -293,7 +295,8 @@ public class TsFileIOWriter {
       }
     }
 
-    MetadataIndexNode metadataIndex = flushMetadataIndex(chunkMetadataListMap, vectorToPathsMap);
+    MetadataIndexNode metadataIndex =
+        flushMetadataIndex(chunkMetadataListMap, vectorToPathsMap, out);
     TsFileMetadata tsFileMetaData = new TsFileMetadata();
     tsFileMetaData.setMetadataIndex(metadataIndex);
     tsFileMetaData.setMetaOffset(metaOffset);
@@ -409,6 +412,92 @@ public class TsFileIOWriter {
     canWrite = false;
   }
 
+  public void endFileV3() throws IOException {
+    long metaOffset = out.getPosition();
+
+    // serialize the SEPARATOR of MetaData
+    ReadWriteIOUtils.write(MetaMarker.SEPARATOR, out.wrapAsStream());
+
+    // group ChunkMetadata by series
+    // only contains ordinary path and time column of vector series
+    Map<Path, List<IChunkMetadata>> chunkMetadataListMap = new TreeMap<>();
+
+    // time column -> ChunkMetadataList TreeMap of value columns in vector
+    Map<Path, Map<Path, List<IChunkMetadata>>> vectorToPathsMap = new HashMap<>();
+
+    for (ChunkGroupMetadata chunkGroupMetadata : chunkGroupMetadataList) {
+      List<ChunkMetadata> chunkMetadatas = chunkGroupMetadata.getChunkMetadataList();
+      int idx = 0;
+      while (idx < chunkMetadatas.size()) {
+        IChunkMetadata chunkMetadata = chunkMetadatas.get(idx);
+        if (chunkMetadata.getMask() == 0) {
+          Path series = new Path(chunkGroupMetadata.getDevice(), chunkMetadata.getMeasurementUid());
+          chunkMetadataListMap.computeIfAbsent(series, k -> new ArrayList<>()).add(chunkMetadata);
+          idx++;
+        } else if (chunkMetadata.isTimeColumn()) {
+          // time column of a vector series
+          Path series = new Path(chunkGroupMetadata.getDevice(), chunkMetadata.getMeasurementUid());
+          chunkMetadataListMap.computeIfAbsent(series, k -> new ArrayList<>()).add(chunkMetadata);
+          idx++;
+          Map<Path, List<IChunkMetadata>> chunkMetadataListMapInVector =
+              vectorToPathsMap.computeIfAbsent(series, key -> new TreeMap<>());
+
+          // value columns of a vector series
+          while (idx < chunkMetadatas.size() && chunkMetadatas.get(idx).isValueColumn()) {
+            chunkMetadata = chunkMetadatas.get(idx);
+            Path vectorSeries =
+                new Path(chunkGroupMetadata.getDevice(), chunkMetadata.getMeasurementUid());
+            chunkMetadataListMapInVector
+                .computeIfAbsent(vectorSeries, k -> new ArrayList<>())
+                .add(chunkMetadata);
+            idx++;
+          }
+        }
+      }
+    }
+
+    // NOTICE: update here, TsFileMetadataV2 does not have MetadataIndexTree
+    // ====================== //
+    TsFileMetadataV2 tsFileMetaData = new TsFileMetadataV2();
+    tsFileMetaData.setMetaOffset(metaOffset);
+
+    TsFileOutput metadataIndexOutput =
+        new LocalTsFileOutput(new FileOutputStream(new File(file.getAbsolutePath() + ".index")));
+    MetadataIndexNode metadataIndex =
+        flushMetadataIndex(chunkMetadataListMap, vectorToPathsMap, metadataIndexOutput);
+    int lastNodeSize = metadataIndex.serializeTo(metadataIndexOutput.wrapAsStream());
+
+    // write the size of last MetadataIndexNode
+    ReadWriteIOUtils.write(lastNodeSize, metadataIndexOutput.wrapAsStream());
+    metadataIndexOutput.close();
+    // ====================== //
+
+    // write TsFileMetaData
+    int size = tsFileMetaData.serializeTo(out.wrapAsStream());
+    if (logger.isDebugEnabled()) {
+      logger.debug("finish flushing the footer {}, file pos:{}", tsFileMetaData, out.getPosition());
+    }
+
+    // write bloom filter
+    size += tsFileMetaData.serializeBloomFilter(out.wrapAsStream(), chunkMetadataListMap.keySet());
+    if (logger.isDebugEnabled()) {
+      logger.debug("finish flushing the bloom filter file pos:{}", out.getPosition());
+    }
+
+    // write TsFileMetaData size
+    ReadWriteIOUtils.write(size, out.wrapAsStream()); // write the size of the file metadata.
+
+    // write magic string
+    out.write(MAGIC_STRING_BYTES);
+
+    // close file
+    out.close();
+    if (resourceLogger.isDebugEnabled() && file != null) {
+      resourceLogger.debug("{} writer is closed.", file.getName());
+    }
+    canWrite = false;
+  }
+
   /**
    * Flush TsFileMetadata, including ChunkMetadataList and TimeseriesMetaData
    *
@@ -419,7 +508,8 @@ public class TsFileIOWriter {
    */
   private MetadataIndexNode flushMetadataIndex(
       Map<Path, List<IChunkMetadata>> chunkMetadataListMap,
-      Map<Path, Map<Path, List<IChunkMetadata>>> vectorToPathsMap)
+      Map<Path, Map<Path, List<IChunkMetadata>>> vectorToPathsMap,
+      TsFileOutput metadataIndexOutput)
       throws IOException {
 
     // convert ChunkMetadataList to this field
@@ -431,7 +521,8 @@ public class TsFileIOWriter {
     }
 
     // construct TsFileMetadata and return
-    return MetadataIndexConstructor.constructMetadataIndex(deviceTimeseriesMetadataMap, out);
+    return MetadataIndexConstructor.constructMetadataIndex(
+        deviceTimeseriesMetadataMap, out, metadataIndexOutput);
   }
 
   private MetadataIndexNode flushMetadataIndexV2(
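
Taken together, endFileV3 and the reworked MetadataIndexConstructor produce two artifacts per
closed file. Assuming a data file named demo.tsfile purely for illustration, the resulting
layout looks roughly like this:

    demo.tsfile        ...chunk data... | SEPARATOR marker | TimeseriesMetadata entries
                       | TsFileMetadataV2 (metaOffset) | bloom filter | footer size (int) | magic string
    demo.tsfile.index  MetadataIndexNode entries | root MetadataIndexNode | root node size (int)

A reader therefore locates the index tree from the tail of the .index file (the last int gives
the root node size, as readMetadataIndex in TsFileSketchToolV2 does), while the data-file footer
keeps only what is needed for the bloom-filter check and for finding the SEPARATOR.
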
diff --git a/tsfile/src/test/java/org/apache/iotdb/tsfile/utils/FilePathUtilsTest.java b/tsfile/src/test/java/org/apache/iotdb/tsfile/utils/FilePathUtilsTest.java
index eb0fd69..50bd890 100644
--- a/tsfile/src/test/java/org/apache/iotdb/tsfile/utils/FilePathUtilsTest.java
+++ b/tsfile/src/test/java/org/apache/iotdb/tsfile/utils/FilePathUtilsTest.java
@@ -18,7 +18,7 @@
  */
 package org.apache.iotdb.tsfile.utils;
 
-import org.apache.commons.io.FileUtils;
+// import org.apache.commons.io.FileUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -51,7 +51,7 @@ public class FilePathUtilsTest {
     tsFile = new File(fullPath);
     boolean success = false;
     try {
-      FileUtils.forceMkdirParent(tsFile);
+      // FileUtils.forceMkdirParent(tsFile);
       success = tsFile.createNewFile();
     } catch (IOException e) {
       Assert.fail(e.getMessage());