You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@iotdb.apache.org by ja...@apache.org on 2022/01/18 07:41:45 UTC
[iotdb] 02/02: change self check
This is an automated email from the ASF dual-hosted git repository.
jackietien pushed a commit to branch AlignedReaderSelfCheck
in repository https://gitbox.apache.org/repos/asf/iotdb.git
commit 0c76ceaf7276270b1b068a5f29b5a77cb103d509
Author: JackieTien97 <ja...@gmail.com>
AuthorDate: Tue Jan 18 15:41:00 2022 +0800
change self check
---
.../iotdb/tsfile/read/TsFileSequenceReader.java | 112 +++++++++++----------
1 file changed, 57 insertions(+), 55 deletions(-)
diff --git a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
index c504174..5e307a8 100644
--- a/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
+++ b/tsfile/src/main/java/org/apache/iotdb/tsfile/read/TsFileSequenceReader.java
@@ -1274,63 +1274,65 @@ public class TsFileSequenceReader implements AutoCloseable {
Statistics<? extends Serializable> chunkStatistics =
Statistics.getStatsByType(dataType);
int dataSize = chunkHeader.getDataSize();
- if (((byte) (chunkHeader.getChunkType() & 0x3F)) == MetaMarker.CHUNK_HEADER) {
- while (dataSize > 0) {
- // a new Page
- PageHeader pageHeader = this.readPageHeader(chunkHeader.getDataType(), true);
- chunkStatistics.mergeStatistics(pageHeader.getStatistics());
- this.skipPageData(pageHeader);
- dataSize -= pageHeader.getSerializedPageSize();
- chunkHeader.increasePageNums(1);
- }
- } else {
- // only one page without statistics; we need to iterate over each point to generate
- // the statistics
- PageHeader pageHeader = this.readPageHeader(chunkHeader.getDataType(), false);
- Decoder valueDecoder =
- Decoder.getDecoderByType(
- chunkHeader.getEncodingType(), chunkHeader.getDataType());
- ByteBuffer pageData = readPage(pageHeader, chunkHeader.getCompressionType());
- Decoder timeDecoder =
- Decoder.getDecoderByType(
- TSEncoding.valueOf(
- TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
- TSDataType.INT64);
- PageReader reader =
- new PageReader(
- pageHeader,
- pageData,
- chunkHeader.getDataType(),
- valueDecoder,
- timeDecoder,
- null);
- BatchData batchData = reader.getAllSatisfiedPageData();
- while (batchData.hasCurrent()) {
- switch (dataType) {
- case INT32:
- chunkStatistics.update(batchData.currentTime(), batchData.getInt());
- break;
- case INT64:
- chunkStatistics.update(batchData.currentTime(), batchData.getLong());
- break;
- case FLOAT:
- chunkStatistics.update(batchData.currentTime(), batchData.getFloat());
- break;
- case DOUBLE:
- chunkStatistics.update(batchData.currentTime(), batchData.getDouble());
- break;
- case BOOLEAN:
- chunkStatistics.update(batchData.currentTime(), batchData.getBoolean());
- break;
- case TEXT:
- chunkStatistics.update(batchData.currentTime(), batchData.getBinary());
- break;
- default:
- throw new IOException("Unexpected type " + dataType);
+ if (dataSize > 0) {
+ if (((byte) (chunkHeader.getChunkType() & 0x3F)) == MetaMarker.CHUNK_HEADER) {
+ while (dataSize > 0) {
+ // a new Page
+ PageHeader pageHeader = this.readPageHeader(chunkHeader.getDataType(), true);
+ chunkStatistics.mergeStatistics(pageHeader.getStatistics());
+ this.skipPageData(pageHeader);
+ dataSize -= pageHeader.getSerializedPageSize();
+ chunkHeader.increasePageNums(1);
}
- batchData.next();
+ } else {
+ // only one page without statistics; we need to iterate over each point to generate
+ // the statistics
+ PageHeader pageHeader = this.readPageHeader(chunkHeader.getDataType(), false);
+ Decoder valueDecoder =
+ Decoder.getDecoderByType(
+ chunkHeader.getEncodingType(), chunkHeader.getDataType());
+ ByteBuffer pageData = readPage(pageHeader, chunkHeader.getCompressionType());
+ Decoder timeDecoder =
+ Decoder.getDecoderByType(
+ TSEncoding.valueOf(
+ TSFileDescriptor.getInstance().getConfig().getTimeEncoder()),
+ TSDataType.INT64);
+ PageReader reader =
+ new PageReader(
+ pageHeader,
+ pageData,
+ chunkHeader.getDataType(),
+ valueDecoder,
+ timeDecoder,
+ null);
+ BatchData batchData = reader.getAllSatisfiedPageData();
+ while (batchData.hasCurrent()) {
+ switch (dataType) {
+ case INT32:
+ chunkStatistics.update(batchData.currentTime(), batchData.getInt());
+ break;
+ case INT64:
+ chunkStatistics.update(batchData.currentTime(), batchData.getLong());
+ break;
+ case FLOAT:
+ chunkStatistics.update(batchData.currentTime(), batchData.getFloat());
+ break;
+ case DOUBLE:
+ chunkStatistics.update(batchData.currentTime(), batchData.getDouble());
+ break;
+ case BOOLEAN:
+ chunkStatistics.update(batchData.currentTime(), batchData.getBoolean());
+ break;
+ case TEXT:
+ chunkStatistics.update(batchData.currentTime(), batchData.getBinary());
+ break;
+ default:
+ throw new IOException("Unexpected type " + dataType);
+ }
+ batchData.next();
+ }
+ chunkHeader.increasePageNums(1);
}
- chunkHeader.increasePageNums(1);
}
currentChunk =
new ChunkMetadata(measurementID, dataType, fileOffsetOfChunk, chunkStatistics);