You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@iotdb.apache.org by ma...@apache.org on 2023/02/17 11:49:23 UTC

[iotdb] branch IOTDB-5517 created (now 7fcd8085f9)

This is an automated email from the ASF dual-hosted git repository.

marklau99 pushed a change to branch IOTDB-5517
in repository https://gitbox.apache.org/repos/asf/iotdb.git


      at 7fcd8085f9  collect sector size from system

This branch includes the following new commits:

     new 7fcd8085f9  collect sector size from system

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[iotdb] 01/01: collect sector size from system

Posted by ma...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

marklau99 pushed a commit to branch IOTDB-5517
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit 7fcd8085f91af4c565d4ba1974ba8a2c9da52f09
Author: Liu Xuxin <li...@outlook.com>
AuthorDate: Fri Feb 17 19:49:15 2023 +0800

     collect sector size from system
---
 .../iotdb/confignode/service/ConfigNode.java       |   3 +-
 .../iotdb/metrics/metricsets/disk/DiskMetrics.java |   4 +-
 .../metricsets/disk/IDiskMetricsManager.java       |  87 ++++++++++----
 .../metricsets/disk/LinuxDiskMetricsManager.java   | 128 ++++++++++++++-------
 .../metricsets/disk/MacDiskMetricsManager.java     | 116 +------------------
 .../metricsets/disk/WindowsDiskMetricsManager.java | 116 +------------------
 .../db/service/metrics/DataNodeMetricsHelper.java  |   3 +-
 7 files changed, 161 insertions(+), 296 deletions(-)

diff --git a/confignode/src/main/java/org/apache/iotdb/confignode/service/ConfigNode.java b/confignode/src/main/java/org/apache/iotdb/confignode/service/ConfigNode.java
index 2e8426abcc..139bf1f32d 100644
--- a/confignode/src/main/java/org/apache/iotdb/confignode/service/ConfigNode.java
+++ b/confignode/src/main/java/org/apache/iotdb/confignode/service/ConfigNode.java
@@ -21,6 +21,7 @@ package org.apache.iotdb.confignode.service;
 import org.apache.iotdb.common.rpc.thrift.TConfigNodeLocation;
 import org.apache.iotdb.common.rpc.thrift.TEndPoint;
 import org.apache.iotdb.common.rpc.thrift.TSStatus;
+import org.apache.iotdb.commons.conf.IoTDBConstant;
 import org.apache.iotdb.commons.exception.StartupException;
 import org.apache.iotdb.commons.service.JMXService;
 import org.apache.iotdb.commons.service.RegisterManager;
@@ -232,7 +233,7 @@ public class ConfigNode implements ConfigNodeMBean {
     MetricService.getInstance().addMetricSet(new LogbackMetrics());
     MetricService.getInstance().addMetricSet(new ProcessMetrics());
     MetricService.getInstance().addMetricSet(new SystemMetrics(false));
-    MetricService.getInstance().addMetricSet(new DiskMetrics("ConfigNode"));
+    MetricService.getInstance().addMetricSet(new DiskMetrics(IoTDBConstant.CN_ROLE));
 
     LOGGER.info("Successfully setup internal services.");
   }
diff --git a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/DiskMetrics.java b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/DiskMetrics.java
index 392c27c05f..1ee4f830ae 100644
--- a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/DiskMetrics.java
+++ b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/DiskMetrics.java
@@ -157,7 +157,7 @@ public class DiskMetrics implements IMetricSet {
           DISK_IO_SECTOR_NUM,
           MetricLevel.IMPORTANT,
           diskMetricsManager,
-          x -> x.getAvgSectorCountOfEachReadForDisk().getOrDefault(diskID, 0.0).longValue(),
+          x -> x.getAvgSizeOfEachReadForDisk().getOrDefault(diskID, 0.0).longValue(),
           TYPE,
           READ,
           NAME,
@@ -166,7 +166,7 @@ public class DiskMetrics implements IMetricSet {
           DISK_IO_SECTOR_NUM,
           MetricLevel.IMPORTANT,
           diskMetricsManager,
-          x -> x.getAvgSectorCountOfEachWriteForDisk().getOrDefault(diskID, 0.0).longValue(),
+          x -> x.getAvgSizeOfEachWriteForDisk().getOrDefault(diskID, 0.0).longValue(),
           TYPE,
           WRITE,
           NAME,
diff --git a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/IDiskMetricsManager.java b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/IDiskMetricsManager.java
index 5ba69f04f9..0a2e7c04c0 100644
--- a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/IDiskMetricsManager.java
+++ b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/IDiskMetricsManager.java
@@ -19,54 +19,97 @@
 
 package org.apache.iotdb.metrics.metricsets.disk;
 
+import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
 
 public interface IDiskMetricsManager {
-  Map<String, Long> getReadDataSizeForDisk();
+  default Map<String, Long> getReadDataSizeForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getWriteDataSizeForDisk();
+  default Map<String, Long> getWriteDataSizeForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getReadOperationCountForDisk();
+  default Map<String, Long> getReadOperationCountForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getWriteOperationCountForDisk();
+  default Map<String, Long> getWriteOperationCountForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getMergedWriteOperationForDisk();
+  default Map<String, Long> getMergedWriteOperationForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getMergedReadOperationForDisk();
+  default Map<String, Long> getMergedReadOperationForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getReadCostTimeForDisk();
+  default Map<String, Long> getReadCostTimeForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getWriteCostTimeForDisk();
+  default Map<String, Long> getWriteCostTimeForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getIoUtilsPercentage();
+  default Map<String, Long> getIoUtilsPercentage() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Double> getAvgReadCostTimeOfEachOpsForDisk();
+  default Map<String, Double> getAvgReadCostTimeOfEachOpsForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Double> getAvgWriteCostTimeOfEachOpsForDisk();
+  default Map<String, Double> getAvgWriteCostTimeOfEachOpsForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Double> getAvgSectorCountOfEachReadForDisk();
+  default Map<String, Double> getAvgSizeOfEachReadForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Double> getAvgSectorCountOfEachWriteForDisk();
+  default Map<String, Double> getAvgSizeOfEachWriteForDisk() {
+    return Collections.emptyMap();
+  }
 
-  Map<String, Long> getQueueSizeForDisk();
+  default Map<String, Long> getQueueSizeForDisk() {
+    return Collections.emptyMap();
+  }
 
-  long getActualReadDataSizeForProcess();
+  default long getActualReadDataSizeForProcess() {
+    return 0L;
+  }
 
-  long getActualWriteDataSizeForProcess();
+  default long getActualWriteDataSizeForProcess() {
+    return 0L;
+  }
 
-  long getReadOpsCountForProcess();
+  default long getReadOpsCountForProcess() {
+    return 0L;
+  }
 
-  long getWriteOpsCountForProcess();
+  default long getWriteOpsCountForProcess() {
+    return 0L;
+  }
 
-  long getAttemptReadSizeForProcess();
+  default long getAttemptReadSizeForProcess() {
+    return 0L;
+  }
 
-  long getAttemptWriteSizeForProcess();
+  default long getAttemptWriteSizeForProcess() {
+    return 0L;
+  }
 
-  Set<String> getDiskIds();
+  default Set<String> getDiskIds() {
+    return Collections.emptySet();
+  }
 
   /** Return different implementation of DiskMetricsManager according to OS type. */
-  public static IDiskMetricsManager getDiskMetricsManager() {
+  static IDiskMetricsManager getDiskMetricsManager() {
     String os = System.getProperty("os.name").toLowerCase();
 
     if (os.startsWith("windows")) {
diff --git a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/LinuxDiskMetricsManager.java b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/LinuxDiskMetricsManager.java
index 0e29ce95b8..3c5455a50c 100644
--- a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/LinuxDiskMetricsManager.java
+++ b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/LinuxDiskMetricsManager.java
@@ -29,7 +29,6 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
@@ -59,6 +58,9 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
   @SuppressWarnings("squid:S1075")
   private static final String DISK_ID_PATH = "/sys/block";
 
+  @SuppressWarnings("squid:S1075")
+  private static final String DISK_SECTOR_SIZE_PATH = "/sys/block/%s/queue/hw_sector_size";
+
   private final String processIoStatusPath;
   private static final int DISK_ID_OFFSET = 3;
   private static final int DISK_READ_COUNT_OFFSET = 4;
@@ -73,27 +75,28 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
   private static final int DISK_IO_TOTAL_TIME_OFFSET = 13;
   private static final long UPDATE_SMALLEST_INTERVAL = 10000L;
   private Set<String> diskIdSet;
+  private final Map<String, Integer> diskSectorSizeMap;
   private long lastUpdateTime = 0L;
   private long updateInterval = 1L;
 
   // Disk IO status structure
-  private final Map<String, Long> lastReadOperationCountForDisk = new HashMap<>();
-  private final Map<String, Long> lastWriteOperationCountForDisk = new HashMap<>();
-  private final Map<String, Long> lastReadTimeCostForDisk = new HashMap<>();
-  private final Map<String, Long> lastWriteTimeCostForDisk = new HashMap<>();
-  private final Map<String, Long> lastMergedReadCountForDisk = new HashMap<>();
-  private final Map<String, Long> lastMergedWriteCountForDisk = new HashMap<>();
-  private final Map<String, Long> lastReadSectorCountForDisk = new HashMap<>();
-  private final Map<String, Long> lastWriteSectorCountForDisk = new HashMap<>();
-  private final Map<String, Long> queueSizeMap = new HashMap<>();
-  private final Map<String, Long> lastIoBusyTimeForDisk = new HashMap<>();
-  private final Map<String, Long> incrementReadOperationCountForDisk = new HashMap<>();
-  private final Map<String, Long> incrementWriteOperationCountForDisk = new HashMap<>();
-  private final Map<String, Long> incrementReadTimeCostForDisk = new HashMap<>();
-  private final Map<String, Long> incrementWriteTimeCostForDisk = new HashMap<>();
-  private final Map<String, Long> incrementReadSectorCountForDisk = new HashMap<>();
-  private final Map<String, Long> incrementWriteSectorCountForDisk = new HashMap<>();
-  private final Map<String, Long> incrementIoBusyTimeForDisk = new HashMap<>();
+  private final Map<String, Long> lastReadOperationCountForDisk;
+  private final Map<String, Long> lastWriteOperationCountForDisk;
+  private final Map<String, Long> lastReadTimeCostForDisk;
+  private final Map<String, Long> lastWriteTimeCostForDisk;
+  private final Map<String, Long> lastMergedReadCountForDisk;
+  private final Map<String, Long> lastMergedWriteCountForDisk;
+  private final Map<String, Long> lastReadSectorCountForDisk;
+  private final Map<String, Long> lastWriteSectorCountForDisk;
+  private final Map<String, Long> lastIoBusyTimeForDisk;
+  private final Map<String, Long> incrementReadOperationCountForDisk;
+  private final Map<String, Long> incrementWriteOperationCountForDisk;
+  private final Map<String, Long> incrementReadTimeCostForDisk;
+  private final Map<String, Long> incrementWriteTimeCostForDisk;
+  private final Map<String, Long> incrementReadSectorCountForDisk;
+  private final Map<String, Long> incrementWriteSectorCountForDisk;
+  private final Map<String, Long> incrementIoBusyTimeForDisk;
+  private final Map<String, Long> queueSizeMap;
 
   // Process IO status structure
   private long lastReallyReadSizeForProcess = 0L;
@@ -104,19 +107,38 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
   private long lastWriteOpsCountForProcess = 0L;
 
   public LinuxDiskMetricsManager() {
-    super();
     processIoStatusPath =
         String.format(
             "/proc/%s/io", MetricConfigDescriptor.getInstance().getMetricConfig().getPid());
+    collectDiskId();
+    diskSectorSizeMap = new HashMap<>(diskIdSet.size());
+    collectDiskInfo();
+    lastReadOperationCountForDisk = new HashMap<>(diskIdSet.size());
+    lastWriteOperationCountForDisk = new HashMap<>(diskIdSet.size());
+    lastReadTimeCostForDisk = new HashMap<>(diskIdSet.size());
+    lastWriteTimeCostForDisk = new HashMap<>(diskIdSet.size());
+    lastMergedReadCountForDisk = new HashMap<>(diskIdSet.size());
+    lastMergedWriteCountForDisk = new HashMap<>(diskIdSet.size());
+    lastReadSectorCountForDisk = new HashMap<>(diskIdSet.size());
+    lastWriteSectorCountForDisk = new HashMap<>(diskIdSet.size());
+    lastIoBusyTimeForDisk = new HashMap<>(diskIdSet.size());
+    incrementReadOperationCountForDisk = new HashMap<>(diskIdSet.size());
+    incrementWriteOperationCountForDisk = new HashMap<>(diskIdSet.size());
+    incrementReadTimeCostForDisk = new HashMap<>(diskIdSet.size());
+    incrementWriteTimeCostForDisk = new HashMap<>(diskIdSet.size());
+    incrementReadSectorCountForDisk = new HashMap<>(diskIdSet.size());
+    incrementWriteSectorCountForDisk = new HashMap<>(diskIdSet.size());
+    incrementIoBusyTimeForDisk = new HashMap<>(diskIdSet.size());
+    queueSizeMap = new HashMap<>(diskIdSet.size());
   }
 
   @Override
   public Map<String, Long> getReadDataSizeForDisk() {
     checkUpdate();
-    Map<String, Long> readDataMap = new HashMap<>();
+    Map<String, Long> readDataMap = new HashMap<>(diskIdSet.size());
     for (Map.Entry<String, Long> entry : lastReadSectorCountForDisk.entrySet()) {
-      // the data size in each sector is 512 byte
-      readDataMap.put(entry.getKey(), entry.getValue() * 512 / 1024);
+      int sectorSize = diskSectorSizeMap.getOrDefault(entry.getKey(), 512);
+      readDataMap.put(entry.getKey(), entry.getValue() * sectorSize / 1024);
     }
     return readDataMap;
   }
@@ -124,10 +146,10 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
   @Override
   public Map<String, Long> getWriteDataSizeForDisk() {
     checkUpdate();
-    Map<String, Long> writeDataMap = new HashMap<>();
+    Map<String, Long> writeDataMap = new HashMap<>(diskIdSet.size());
     for (Map.Entry<String, Long> entry : lastWriteSectorCountForDisk.entrySet()) {
-      // the data size in each sector is 512 byte
-      writeDataMap.put(entry.getKey(), entry.getValue() * 512 / 1024);
+      int sectorSize = diskSectorSizeMap.getOrDefault(entry.getKey(), 512);
+      writeDataMap.put(entry.getKey(), entry.getValue() * sectorSize / 1024);
     }
     return writeDataMap;
   }
@@ -155,7 +177,7 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
 
   @Override
   public Map<String, Long> getIoUtilsPercentage() {
-    Map<String, Long> utilsMap = new HashMap<>();
+    Map<String, Long> utilsMap = new HashMap<>(diskIdSet.size());
     for (Map.Entry<String, Long> entry : incrementIoBusyTimeForDisk.entrySet()) {
       utilsMap.put(entry.getKey(), (long) (entry.getValue() * 10000.0 / updateInterval));
     }
@@ -164,7 +186,7 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
 
   @Override
   public Map<String, Double> getAvgReadCostTimeOfEachOpsForDisk() {
-    Map<String, Double> avgReadTimeCostMap = new HashMap<>();
+    Map<String, Double> avgReadTimeCostMap = new HashMap<>(diskIdSet.size());
     for (Map.Entry<String, Long> readCostEntry : incrementReadTimeCostForDisk.entrySet()) {
       long writeOpsCount =
           incrementReadOperationCountForDisk.getOrDefault(readCostEntry.getKey(), 1L);
@@ -177,7 +199,7 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
 
   @Override
   public Map<String, Double> getAvgWriteCostTimeOfEachOpsForDisk() {
-    Map<String, Double> avgWriteTimeCostMap = new HashMap<>();
+    Map<String, Double> avgWriteTimeCostMap = new HashMap<>(diskIdSet.size());
     for (Map.Entry<String, Long> writeCostEntry : incrementWriteTimeCostForDisk.entrySet()) {
       long writeOpsCount =
           incrementWriteOperationCountForDisk.getOrDefault(writeCostEntry.getKey(), 1L);
@@ -189,29 +211,32 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
   }
 
   @Override
-  public Map<String, Double> getAvgSectorCountOfEachReadForDisk() {
-    Map<String, Double> avgSectorSizeOfRead = new HashMap<>();
+  public Map<String, Double> getAvgSizeOfEachReadForDisk() {
+    Map<String, Double> avgSizeOfReadMap = new HashMap<>(diskIdSet.size());
     for (Map.Entry<String, Long> readSectorSizeEntry : incrementReadSectorCountForDisk.entrySet()) {
       long readOpsCount =
           incrementReadOperationCountForDisk.getOrDefault(readSectorSizeEntry.getKey(), 1L);
-      avgSectorSizeOfRead.put(
-          readSectorSizeEntry.getKey(), ((double) readSectorSizeEntry.getValue()) / readOpsCount);
+      int sectorSize = diskSectorSizeMap.getOrDefault(readSectorSizeEntry.getKey(), 512);
+      avgSizeOfReadMap.put(
+          readSectorSizeEntry.getKey(),
+          ((double) readSectorSizeEntry.getValue()) * sectorSize / 1024.0 / readOpsCount);
     }
-    return avgSectorSizeOfRead;
+    return avgSizeOfReadMap;
   }
 
   @Override
-  public Map<String, Double> getAvgSectorCountOfEachWriteForDisk() {
-    Map<String, Double> avgSectorSizeOfWrite = new HashMap<>();
+  public Map<String, Double> getAvgSizeOfEachWriteForDisk() {
+    Map<String, Double> avgSizeOfWriteMap = new HashMap<>(diskIdSet.size());
     for (Map.Entry<String, Long> writeSectorSizeEntry :
         incrementWriteSectorCountForDisk.entrySet()) {
       long writeOpsCount =
           incrementWriteOperationCountForDisk.getOrDefault(writeSectorSizeEntry.getKey(), 1L);
-      avgSectorSizeOfWrite.put(
+      int sectorSize = diskSectorSizeMap.getOrDefault(writeSectorSizeEntry.getKey(), 512);
+      avgSizeOfWriteMap.put(
           writeSectorSizeEntry.getKey(),
-          ((double) writeSectorSizeEntry.getValue()) / writeOpsCount);
+          ((double) writeSectorSizeEntry.getValue()) * sectorSize / 1024.0 / writeOpsCount);
     }
-    return avgSectorSizeOfWrite;
+    return avgSizeOfWriteMap;
   }
 
   @Override
@@ -261,9 +286,13 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
 
   @Override
   public Set<String> getDiskIds() {
+    return diskIdSet;
+  }
+
+  private void collectDiskId() {
     File diskIdFolder = new File(DISK_ID_PATH);
     if (!diskIdFolder.exists()) {
-      return Collections.emptySet();
+      return;
     }
     diskIdSet =
         new ArrayList<>(Arrays.asList(Objects.requireNonNull(diskIdFolder.listFiles())))
@@ -271,7 +300,26 @@ public class LinuxDiskMetricsManager implements IDiskMetricsManager {
                 .filter(x -> !x.getName().startsWith("loop") && !x.getName().startsWith("ram"))
                 .map(File::getName)
                 .collect(Collectors.toSet());
-    return diskIdSet;
+  }
+
+  private void collectDiskInfo() {
+    for (String diskId : diskIdSet) {
+      String diskSectorSizePath = String.format(DISK_SECTOR_SIZE_PATH, diskId);
+      File diskSectorSizeFile = new File(diskSectorSizePath);
+      try (Scanner scanner = new Scanner(Files.newInputStream(diskSectorSizeFile.toPath()))) {
+        if (scanner.hasNext()) {
+          int sectorSize = Integer.parseInt(scanner.nextLine());
+          diskSectorSizeMap.put(diskId, sectorSize);
+        } else {
+          // use 512 byte as default value
+          diskSectorSizeMap.put(diskId, 512);
+        }
+      } catch (IOException e) {
+        log.warn("Failed to get the sector size of {}", diskId, e);
+        // use 512 bytes as default value
+        diskSectorSizeMap.put(diskId, 512);
+      }
+    }
   }
 
   private void updateInfo() {
diff --git a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/MacDiskMetricsManager.java b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/MacDiskMetricsManager.java
index 7b212ac119..f5efff5368 100644
--- a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/MacDiskMetricsManager.java
+++ b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/MacDiskMetricsManager.java
@@ -19,119 +19,5 @@
 
 package org.apache.iotdb.metrics.metricsets.disk;
 
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-
 /** Disk Metrics Manager for macOS, not implemented yet. */
-public class MacDiskMetricsManager implements IDiskMetricsManager {
-
-  public MacDiskMetricsManager() {
-    super();
-  }
-
-  @Override
-  public Map<String, Long> getReadDataSizeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getWriteDataSizeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getReadOperationCountForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getWriteOperationCountForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getReadCostTimeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getWriteCostTimeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getIoUtilsPercentage() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Double> getAvgReadCostTimeOfEachOpsForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Double> getAvgWriteCostTimeOfEachOpsForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Double> getAvgSectorCountOfEachReadForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Double> getAvgSectorCountOfEachWriteForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getQueueSizeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getMergedWriteOperationForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getMergedReadOperationForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public long getActualReadDataSizeForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getActualWriteDataSizeForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getReadOpsCountForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getWriteOpsCountForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getAttemptReadSizeForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getAttemptWriteSizeForProcess() {
-    return 0;
-  }
-
-  @Override
-  public Set<String> getDiskIds() {
-    return Collections.emptySet();
-  }
-}
+public class MacDiskMetricsManager implements IDiskMetricsManager {}
diff --git a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/WindowsDiskMetricsManager.java b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/WindowsDiskMetricsManager.java
index 0668ba49b0..975576ac90 100644
--- a/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/WindowsDiskMetricsManager.java
+++ b/metrics/interface/src/main/java/org/apache/iotdb/metrics/metricsets/disk/WindowsDiskMetricsManager.java
@@ -19,119 +19,5 @@
 
 package org.apache.iotdb.metrics.metricsets.disk;
 
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-
 /** Disk Metrics Manager for Windows system, not implemented yet. */
-public class WindowsDiskMetricsManager implements IDiskMetricsManager {
-
-  public WindowsDiskMetricsManager() {
-    super();
-  }
-
-  @Override
-  public Map<String, Long> getReadDataSizeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getWriteDataSizeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getReadOperationCountForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getWriteOperationCountForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getMergedWriteOperationForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getMergedReadOperationForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getReadCostTimeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getWriteCostTimeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getIoUtilsPercentage() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Double> getAvgReadCostTimeOfEachOpsForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Double> getAvgWriteCostTimeOfEachOpsForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Double> getAvgSectorCountOfEachReadForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Double> getAvgSectorCountOfEachWriteForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public Map<String, Long> getQueueSizeForDisk() {
-    return Collections.emptyMap();
-  }
-
-  @Override
-  public long getActualReadDataSizeForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getActualWriteDataSizeForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getReadOpsCountForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getWriteOpsCountForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getAttemptReadSizeForProcess() {
-    return 0;
-  }
-
-  @Override
-  public long getAttemptWriteSizeForProcess() {
-    return 0;
-  }
-
-  @Override
-  public Set<String> getDiskIds() {
-    return Collections.emptySet();
-  }
-}
+public class WindowsDiskMetricsManager implements IDiskMetricsManager {}
diff --git a/server/src/main/java/org/apache/iotdb/db/service/metrics/DataNodeMetricsHelper.java b/server/src/main/java/org/apache/iotdb/db/service/metrics/DataNodeMetricsHelper.java
index 1bb74f4851..453f4f9933 100644
--- a/server/src/main/java/org/apache/iotdb/db/service/metrics/DataNodeMetricsHelper.java
+++ b/server/src/main/java/org/apache/iotdb/db/service/metrics/DataNodeMetricsHelper.java
@@ -19,6 +19,7 @@
 
 package org.apache.iotdb.db.service.metrics;
 
+import org.apache.iotdb.commons.conf.IoTDBConstant;
 import org.apache.iotdb.commons.service.metric.MetricService;
 import org.apache.iotdb.db.mpp.metric.DataExchangeCostMetricSet;
 import org.apache.iotdb.db.mpp.metric.DataExchangeCountMetricSet;
@@ -40,7 +41,7 @@ public class DataNodeMetricsHelper {
     MetricService.getInstance().addMetricSet(new CompactionMetrics());
     MetricService.getInstance().addMetricSet(new ProcessMetrics());
     MetricService.getInstance().addMetricSet(new SystemMetrics(true));
-    MetricService.getInstance().addMetricSet(new DiskMetrics("DataNode"));
+    MetricService.getInstance().addMetricSet(new DiskMetrics(IoTDBConstant.DN_ROLE));
 
     // bind query related metrics
     MetricService.getInstance().addMetricSet(new QueryPlanCostMetricSet());