Posted to notifications@skywalking.apache.org by wu...@apache.org on 2023/03/17 13:01:57 UTC

[skywalking] branch master updated: [Breaking Change] Enhance JDBC storage through merging tables and managing day-based table rolling (#10544)

This is an automated email from the ASF dual-hosted git repository.

wusheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/skywalking.git


The following commit(s) were added to refs/heads/master by this push:
     new 74a8589307 [Breaking Change] Enhance JDBC storage through merging tables and managing day-based table rolling (#10544)
74a8589307 is described below

commit 74a85893076ff6371cd0c2304c29ecdd123bb4bd
Author: kezhenxu94 <ke...@apache.org>
AuthorDate: Fri Mar 17 21:01:48 2023 +0800

    [Breaking Change] Enhance JDBC storage through merging tables and managing day-based table rolling (#10544)
    
    * [Breaking Change] Enhance JDBC storage through merging tables and managing day-based table rolling.
    
    * [Breaking Change] Sharding-MySQL implementations and tests are removed, as the day-based rolling mechanism is now available by default.
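
    Editor's note: a minimal, hypothetical sketch of the day-based rolling idea the commit message describes. Physical tables are derived from a logical table name plus a day suffix, so retention can drop whole tables instead of deleting rows. The class name and the `metrics_all` table name here are illustrative; the actual logic added by this commit lives in `TableHelper.java` and `JDBCTableInstaller.java`.

```java
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

// Illustrative sketch only, not the implementation shipped in this commit.
public class DayBasedRollingSketch {
    private static final DateTimeFormatter DAY_SUFFIX = DateTimeFormatter.ofPattern("yyyyMMdd");

    // Resolve the physical table for a given day, e.g. "metrics_all_20230317".
    static String physicalTable(String logicalTable, LocalDate day) {
        return logicalTable + "_" + day.format(DAY_SUFFIX);
    }

    // TTL becomes a whole-table drop instead of a row-by-row DELETE.
    static String ttlStatement(String logicalTable, LocalDate expiredDay) {
        return "DROP TABLE IF EXISTS " + physicalTable(logicalTable, expiredDay);
    }

    public static void main(String[] args) {
        System.out.println(physicalTable("metrics_all", LocalDate.of(2023, 3, 17)));
        System.out.println(ttlStatement("metrics_all", LocalDate.of(2023, 3, 10)));
    }
}
```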
---
 .github/workflows/skywalking.yaml                  |  22 -
 docs/en/changes/changes.md                         |   2 +
 docs/en/setup/backend/backend-setup.md             |   4 +-
 docs/en/setup/backend/backend-storage.md           |  58 +-
 docs/en/setup/backend/configuration-vocabulary.md  |   6 -
 oap-server-bom/pom.xml                             |   6 +
 .../apache/skywalking/oap/server/core/Const.java   |   1 +
 .../analysis/manual/endpoint/EndpointTraffic.java  |  13 +-
 .../analysis/manual/instance/InstanceTraffic.java  |  14 +-
 .../server/core/analysis/manual/log/LogRecord.java |   3 -
 .../manual/networkalias/NetworkAddressAlias.java   |   3 -
 .../analysis/manual/process/ProcessTraffic.java    |   3 -
 .../manual/process/ServiceLabelRecord.java         |   3 -
 .../EndpointRelationServerSideMetrics.java         |   6 -
 .../ServiceInstanceRelationClientSideMetrics.java  |   6 -
 .../ServiceInstanceRelationServerSideMetrics.java  |   6 -
 .../process/ProcessRelationClientSideMetrics.java  |   3 -
 .../process/ProcessRelationServerSideMetrics.java  |   3 -
 .../service/ServiceRelationClientSideMetrics.java  |   6 -
 .../service/ServiceRelationServerSideMetrics.java  |   6 -
 .../manual/searchtag/TagAutocompleteData.java      |   3 -
 .../analysis/manual/segment/SegmentRecord.java     |   8 +-
 .../analysis/manual/service/ServiceTraffic.java    |  10 +-
 .../oap/server/core/analysis/metrics/Event.java    |   3 -
 .../oap/server/core/analysis/metrics/Metrics.java  |   6 -
 .../analysis/worker/ManagementStreamProcessor.java |   9 +-
 .../analysis/worker/MetricsStreamProcessor.java    |  20 +-
 .../core/analysis/worker/NoneStreamProcessor.java  |   2 +-
 .../analysis/worker/RecordStreamProcessor.java     |   2 +-
 .../core/analysis/worker/TopNStreamProcessor.java  |   2 +-
 .../manual/errorlog/BrowserErrorLogRecord.java     |   6 -
 .../profiling/ebpf/EBPFProfilingQueryService.java  |  30 +-
 .../ebpf/storage/EBPFProfilingDataRecord.java      |   4 +-
 .../ebpf/storage/EBPFProfilingScheduleRecord.java  |   5 +-
 .../trace/ProfileThreadSnapshotRecord.java         |   6 +-
 .../oap/server/core/query/DurationUtils.java       |   6 +-
 .../server/core/query/ProcessTopologyBuilder.java  |  22 +-
 .../server/core/query/TopologyQueryService.java    |  13 +-
 .../oap/server/core/query/input/Duration.java      |   3 +-
 .../oap/server/core/query/type/Alarms.java         |   7 +-
 .../server/core/query/type/BrowserErrorLogs.java   |   5 +-
 .../oap/server/core/query/type/Logs.java           |   9 +-
 .../oap/server/core/query/type/Record.java         |   2 +-
 .../oap/server/core/query/type/TraceBrief.java     |   7 +-
 .../oap/server/core/query/type/event/Events.java   |  13 +-
 .../oap/server/core/source/ScopeDefaultColumn.java |   3 +-
 .../oap/server/core/storage/IMetricsDAO.java       |   9 +-
 .../oap/server/core/storage/ShardingAlgorithm.java | 189 ------
 .../core/storage/annotation/SQLDatabase.java       |  38 +-
 .../oap/server/core/storage/model/Model.java       |  39 +-
 .../oap/server/core/storage/model/ModelColumn.java |   9 +-
 .../server/core/storage/model/ModelCreator.java    |   2 +-
 .../storage/model/SQLDatabaseModelExtension.java   |  27 +-
 .../server/core/storage/model/StorageModels.java   |  59 +-
 .../server/core/storage/query/IZipkinQueryDAO.java |   7 +-
 .../core/storage/ttl/DataTTLKeeperTimer.java       |  16 +-
 .../core/zipkin/ZipkinServiceRelationTraffic.java  |   3 -
 .../core/zipkin/ZipkinServiceSpanTraffic.java      |   3 -
 .../server/core/zipkin/ZipkinServiceTraffic.java   |   3 -
 .../oap/server/core/zipkin/ZipkinSpanRecord.java   |   3 -
 .../core/storage/model/StorageModelsTest.java      |   9 +-
 .../library/client/jdbc/hikaricp/JDBCClient.java   | 187 ++++++
 .../client/jdbc/hikaricp/JDBCHikariCPClient.java   | 161 -----
 .../query/promql/rt/PromQLExprQueryVisitor.java    |   4 +-
 .../resolver/EBPFProcessProfilingQuery.java        |   4 +-
 .../query/graphql/resolver/TopNRecordsQuery.java   |   7 +-
 .../oap/query/graphql/resolver/TopologyQuery.java  |   2 +-
 oap-server/server-starter/pom.xml                  |  10 -
 .../src/main/resources/application.yml             |  28 +-
 .../config/ApplicationConfigLoaderTestCase.java    |   2 +-
 oap-server/server-storage-plugin/pom.xml           |   2 -
 .../plugin/elasticsearch/base/IndexController.java |  34 +-
 .../elasticsearch/base/StorageEsInstaller.java     |  17 +-
 .../elasticsearch/query/AggregationQueryEsDAO.java |   7 +-
 .../elasticsearch/query/ESEventQueryDAO.java       |  20 +-
 .../elasticsearch/query/RecordsQueryEsDAO.java     |  16 +-
 .../elasticsearch/base/TimeSeriesUtilsTest.java    |   6 +-
 .../storage-jdbc-hikaricp-plugin/pom.xml           |  11 +-
 .../storage/plugin/jdbc/BatchSQLExecutor.java      |  40 +-
 .../oap/server/storage/plugin/jdbc/SQLBuilder.java |  15 +-
 .../server/storage/plugin/jdbc/SQLExecutor.java    |  16 +-
 .../server/storage/plugin/jdbc/TableMetaInfo.java  |   5 +-
 .../plugin/jdbc/common/JDBCEntityConverters.java}  |  27 +-
 .../plugin/jdbc/common/JDBCStorageProvider.java    |  62 +-
 .../plugin/jdbc/common/JDBCTableInstaller.java     | 277 ++++++++
 .../plugin/jdbc/common/SQLAndParameters.java}      |  26 +-
 .../storage/plugin/jdbc/common/TableHelper.java    | 183 ++++++
 .../jdbc/common/dao/JDBCAggregationQueryDAO.java   | 132 ++--
 .../plugin/jdbc/common/dao/JDBCAlarmQueryDAO.java  | 177 ++---
 .../plugin/jdbc/common/dao/JDBCBatchDAO.java       |  51 +-
 .../jdbc/common/dao/JDBCBrowserLogQueryDAO.java    | 108 +--
 .../jdbc/common/dao/JDBCEBPFProfilingDataDAO.java  |  93 ++-
 .../common/dao/JDBCEBPFProfilingScheduleDAO.java   |  74 ++-
 .../jdbc/common/dao/JDBCEBPFProfilingTaskDAO.java  | 162 +++--
 .../plugin/jdbc/common/dao/JDBCEventQueryDAO.java  | 127 ++--
 .../jdbc/common/dao/JDBCHistoryDeleteDAO.java      | 123 ++--
 .../plugin/jdbc/common/dao/JDBCLogQueryDAO.java    | 204 +++---
 .../plugin/jdbc/common/dao/JDBCManagementDAO.java  |  15 +-
 .../jdbc/common/dao/JDBCMetadataQueryDAO.java      | 607 ++++++++++-------
 .../plugin/jdbc/common/dao/JDBCMetricsDAO.java     |  31 +-
 .../jdbc/common/dao/JDBCMetricsQueryDAO.java       | 331 +++++-----
 .../common/dao/JDBCNetworkAddressAliasDAO.java     |  76 ++-
 .../plugin/jdbc/common/dao/JDBCNoneStreamDAO.java  |  15 +-
 .../common/dao/JDBCProfileTaskLogQueryDAO.java     |  69 +-
 .../jdbc/common/dao/JDBCProfileTaskQueryDAO.java   | 156 +++--
 .../dao/JDBCProfileThreadSnapshotQueryDAO.java     | 322 +++++----
 .../plugin/jdbc/common/dao/JDBCRecordDAO.java      |  11 +-
 .../jdbc/common/dao/JDBCRecordsQueryDAO.java       |  84 ++-
 .../plugin/jdbc/common/dao/JDBCSQLExecutor.java    | 231 +++----
 .../jdbc/common/dao/JDBCServiceLabelQueryDAO.java  |  54 +-
 .../common/dao/JDBCSpanAttachedEventQueryDAO.java  | 113 ++--
 .../plugin/jdbc/common/dao/JDBCStorageDAO.java     |   4 +-
 .../common/dao/JDBCTagAutoCompleteQueryDAO.java    | 101 ++-
 .../jdbc/common/dao/JDBCTopologyQueryDAO.java      | 226 ++++---
 .../plugin/jdbc/common/dao/JDBCTraceQueryDAO.java  | 308 +++++----
 .../common/dao/JDBCUITemplateManagementDAO.java    | 103 +--
 .../plugin/jdbc/common/dao/JDBCZipkinQueryDAO.java | 405 +++++++-----
 .../storage/plugin/jdbc/h2/H2TableInstaller.java   | 142 +---
 .../plugin/jdbc/mysql/MySQLTableInstaller.java     | 105 +--
 .../jdbc/postgresql/PostgreSQLStorageProvider.java |   4 +-
 .../jdbc/postgresql/PostgreSQLTableInstaller.java  |  91 +--
 .../dao/PostgreSQLAggregationQueryDAO.java         |   8 +-
 .../postgresql/dao/PostgreSQLMetricsQueryDAO.java  |   7 +-
 .../jdbc/common/dao/JDBCHistoryDeleteDAOIT.java    | 143 ++++
 .../storage-shardingsphere-plugin/pom.xml          |  55 --
 .../jdbc/shardingsphere/DurationWithinTTL.java     | 140 ----
 .../plugin/jdbc/shardingsphere/ShardingRule.java   |  64 --
 .../jdbc/shardingsphere/ShardingRulesOperator.java | 283 --------
 .../ShardingSphereTableInstaller.java              | 114 ----
 .../dao/ShardingAggregationQueryDAO.java           |  50 --
 .../dao/ShardingBrowserLogQueryDAO.java            |  52 --
 .../dao/ShardingHistoryDeleteDAO.java              | 147 -----
 .../shardingsphere/dao/ShardingLogQueryDAO.java    |  66 --
 .../dao/ShardingMetricsQueryDAO.java               | 102 ---
 .../dao/ShardingTopologyQueryDAO.java              |  91 ---
 .../shardingsphere/dao/ShardingTraceQueryDAO.java  |  68 --
 .../shardingsphere/dao/ShardingZipkinQueryDAO.java |  53 --
 .../mysql/MySQLShardingStorageConfig.java          |  39 --
 .../mysql/MySQLShardingStorageProvider.java        | 136 ----
 ...alking.oap.server.library.module.ModuleProvider |  19 -
 .../jdbc/shardingsphere/DurationWithinTTLTest.java | 128 ----
 .../jdbc/shardingsphere/ServiceCpmMetrics.java     | 104 ---
 .../shardingsphere/ServiceCpmMetricsBuilder.java   |  46 --
 .../jdbc/shardingsphere/ShardingRulesTest.java     | 102 ---
 .../jdbc/shardingsphere/ShardingSphereIT.java      | 722 ---------------------
 .../test/resources/conf-mysql/config-sharding.yaml |  45 --
 .../src/test/resources/conf-mysql/server.yaml      |  78 ---
 .../src/test/resources/docker-compose-mysql.yml    |  60 --
 .../src/test/resources/download-mysql.sh           |  32 -
 .../src/test/resources/log4j2.xml                  |  31 -
 .../storage-tidb-plugin/pom.xml                    |  51 --
 .../plugin/jdbc/tidb/TiDBStorageProvider.java      |  55 --
 .../plugin/jdbc/tidb/dao/TiDBHistoryDeleteDAO.java |  87 ---
 ...alking.oap.server.library.module.ModuleProvider |  19 -
 .../src/main/resources/application.yml             |   2 +-
 test/e2e-v2/cases/alarm/h2/docker-compose.yml      |   2 +-
 .../browser/mysql/sharding/docker-compose.yml      | 165 -----
 test/e2e-v2/cases/browser/mysql/sharding/e2e.yaml  |  40 --
 test/e2e-v2/cases/event/h2/docker-compose.yml      |   2 +-
 test/e2e-v2/cases/log/h2/docker-compose.yml        |   2 +-
 test/e2e-v2/cases/log/mysql/docker-compose.yml     |   2 +-
 .../cases/log/mysql/sharding/docker-compose.yml    |  90 ---
 test/e2e-v2/cases/log/mysql/sharding/e2e.yaml      |  47 --
 .../cases/mysql/mysql-slowsql/docker-compose.yaml  |   2 +-
 .../profiling/ebpf/network/h2/docker-compose.yml   |   4 +-
 .../profiling/ebpf/network/tidb/docker-compose.yml |  91 ---
 .../cases/profiling/ebpf/network/tidb/e2e.yaml     |  44 --
 .../profiling/ebpf/oncpu/h2/docker-compose.yml     |   4 +-
 .../profiling/ebpf/oncpu/tidb/docker-compose.yml   |  69 --
 .../cases/profiling/ebpf/oncpu/tidb/e2e.yaml       |  37 --
 .../cases/profiling/trace/h2/docker-compose.yml    |   2 +-
 .../cases/profiling/trace/tidb/docker-compose.yml  |  60 --
 test/e2e-v2/cases/profiling/trace/tidb/e2e.yaml    |  37 --
 test/e2e-v2/cases/storage/h2/docker-compose.yml    |   2 +-
 test/e2e-v2/cases/storage/mysql/docker-compose.yml |   2 +-
 .../storage/mysql/sharding/docker-compose.yml      | 103 ---
 test/e2e-v2/cases/storage/mysql/sharding/e2e.yaml  |  50 --
 test/e2e-v2/cases/storage/tidb/docker-compose.yml  |  76 ---
 test/e2e-v2/cases/storage/tidb/e2e.yaml            |  50 --
 .../e2e-v2/cases/storage/tidb/tidbconfig/tidb.toml | 254 --------
 .../e2e-v2/cases/ttl/postgresql/docker-compose.yml |  70 --
 test/e2e-v2/cases/ttl/postgresql/e2e.yaml          |  37 --
 test/e2e-v2/cases/ttl/tidb/docker-compose.yml      |  72 --
 test/e2e-v2/cases/ttl/tidb/e2e.yaml                |  37 --
 test/e2e-v2/cases/zipkin/h2/docker-compose.yml     |   2 +-
 test/e2e-v2/cases/zipkin/mysql/docker-compose.yml  |   2 +-
 .../cases/zipkin/mysql/sharding/docker-compose.yml | 102 ---
 test/e2e-v2/cases/zipkin/mysql/sharding/e2e.yaml   |  47 --
 test/e2e-v2/script/docker-compose/base-compose.yml |   2 +-
 test/e2e-v2/script/prepare/setup-oap/log4j2.xml    |   2 +
 .../script/shardingsphere-proxy/base-compose.yml   |  35 -
 .../conf-mysql/config-sharding.yaml                |  45 --
 .../shardingsphere-proxy/conf-mysql/server.yaml    |  77 ---
 tools/profile-exporter/application.yml             |   4 +-
 194 files changed, 3869 insertions(+), 7921 deletions(-)

diff --git a/.github/workflows/skywalking.yaml b/.github/workflows/skywalking.yaml
index 0beb4ef5c8..f90552885c 100644
--- a/.github/workflows/skywalking.yaml
+++ b/.github/workflows/skywalking.yaml
@@ -283,8 +283,6 @@ jobs:
     strategy:
       matrix:
         test:
-          - name: ShardingSphere
-            class: org.apache.skywalking.oap.server.storage.plugin.jdbc.shardingsphere.ShardingSphereIT
           - name: ElasticSearch / OpenSearch
             class: org.apache.skywalking.library.elasticsearch.ElasticSearchIT
     steps:
@@ -344,8 +342,6 @@ jobs:
             config: test/e2e-v2/cases/storage/h2/e2e.yaml
           - name: Storage MySQL
             config: test/e2e-v2/cases/storage/mysql/e2e.yaml
-          - name: Storage TiDB
-            config: test/e2e-v2/cases/storage/tidb/e2e.yaml
           - name: Storage PostgreSQL
             config: test/e2e-v2/cases/storage/postgres/e2e.yaml
           - name: Storage ES 6.3.2
@@ -395,10 +391,6 @@ jobs:
           - name: Alarm PostgreSQL
             config: test/e2e-v2/cases/alarm/postgres/e2e.yaml
 
-          - name: TTL TiDB
-            config: test/e2e-v2/cases/ttl/tidb/e2e.yaml
-          - name: TTL PostgreSQL
-            config: test/e2e-v2/cases/ttl/postgresql/e2e.yaml
           - name: TTL ES 6.3.2
             config: test/e2e-v2/cases/ttl/es/e2e.yaml
             env: ES_VERSION=6.3.2
@@ -480,8 +472,6 @@ jobs:
             config: test/e2e-v2/cases/profiling/trace/mysql/e2e.yaml
           - name: Trace Profiling Postgres
             config: test/e2e-v2/cases/profiling/trace/postgres/e2e.yaml
-          - name: Trace Profiling TiDB
-            config: test/e2e-v2/cases/profiling/trace/tidb/e2e.yaml
           - name: Trace Profiling OpenSearch 1.1.0
             config: test/e2e-v2/cases/profiling/trace/opensearch/e2e.yaml
             env: OPENSEARCH_VERSION=1.1.0
@@ -504,8 +494,6 @@ jobs:
             config: test/e2e-v2/cases/profiling/ebpf/oncpu/mysql/e2e.yaml
           - name: eBPF Profiling On CPU Postgres
             config: test/e2e-v2/cases/profiling/ebpf/oncpu/postgres/e2e.yaml
-          - name: eBPF Profiling On CPU TiDB
-            config: test/e2e-v2/cases/profiling/ebpf/oncpu/tidb/e2e.yaml
           - name: eBPF Profiling On CPU OpenSearch 1.1.0
             config: test/e2e-v2/cases/profiling/ebpf/oncpu/opensearch/e2e.yaml
             env: OPENSEARCH_VERSION=1.1.0
@@ -530,8 +518,6 @@ jobs:
             config: test/e2e-v2/cases/profiling/ebpf/network/mysql/e2e.yaml
           - name: eBPF Profiling Network Postgres
             config: test/e2e-v2/cases/profiling/ebpf/network/postgres/e2e.yaml
-          - name: eBPF Profiling Network TiDB
-            config: test/e2e-v2/cases/profiling/ebpf/network/tidb/e2e.yaml
           - name: eBPF Profiling Network OpenSearch 1.1.0
             config: test/e2e-v2/cases/profiling/ebpf/network/opensearch/e2e.yaml
             env: OPENSEARCH_VERSION=1.1.0
@@ -651,14 +637,6 @@ jobs:
           - name: Zipkin BanyanDB
             config: test/e2e-v2/cases/zipkin/banyandb/e2e.yaml
 
-          - name: MySQL-Sharding storage
-            config: test/e2e-v2/cases/storage/mysql/sharding/e2e.yaml
-          - name: MySQL-Sharding log
-            config: test/e2e-v2/cases/log/mysql/sharding/e2e.yaml
-          - name: MySQL-Sharding browser
-            config: test/e2e-v2/cases/browser/mysql/sharding/e2e.yaml
-          - name: MySQL-Sharding zipkin
-            config: test/e2e-v2/cases/zipkin/mysql/sharding/e2e.yaml
           - name: APISIX metrics
             config: test/e2e-v2/cases/apisix/otel-collector/e2e.yaml
           - name: Exporter Kafka
diff --git a/docs/en/changes/changes.md b/docs/en/changes/changes.md
index 44b62ae03c..810e21a8de 100644
--- a/docs/en/changes/changes.md
+++ b/docs/en/changes/changes.md
@@ -11,6 +11,8 @@
 * Support to bind TLS status as a part of component for service topology.
 * Fix component ID priority bug.
 * Fix component ID of topology overlap due to storage layer bugs.
+* [Breaking Change] Enhance JDBC storage through merging tables and managing day-based table rolling.
+* [Breaking Change] Sharding-MySQL implementations and tests are removed, as the day-based rolling mechanism is now available by default.
 
 #### Documentation
 
diff --git a/docs/en/setup/backend/backend-setup.md b/docs/en/setup/backend/backend-setup.md
index a4d4605310..ef5bc0a3e9 100755
--- a/docs/en/setup/backend/backend-setup.md
+++ b/docs/en/setup/backend/backend-setup.md
@@ -61,12 +61,12 @@ storage:
   selector: mysql # the mysql storage will actually be activated, while the h2 storage takes no effect
   h2:
     properties:
-      jdbcUrl: ${SW_STORAGE_H2_URL:jdbc:h2:mem:skywalking-oap-db;DB_CLOSE_DELAY=-1}
+      jdbcUrl: ${SW_STORAGE_H2_URL:jdbc:h2:mem:skywalking-oap-db;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=FALSE}
       dataSource.user: ${SW_STORAGE_H2_USER:sa}
     metadataQueryMaxSize: ${SW_STORAGE_H2_QUERY_MAX_SIZE:5000}
   mysql:
     properties:
-      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:3306/swtest"}
+      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:3306/swtest?allowMultiQueries=true"}
       dataSource.user: ${SW_DATA_SOURCE_USER:root}
       dataSource.password: ${SW_DATA_SOURCE_PASSWORD:root@1234}
       dataSource.cachePrepStmts: ${SW_DATA_SOURCE_CACHE_PREP_STMTS:true}
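
Editor's note: the `allowMultiQueries=true` flag appended above matters because MySQL Connector/J, by default, rejects a single `Statement.execute()` call that carries several `;`-separated statements. A hedged sketch of the behavior; the URL and credentials are placeholders mirroring the defaults in this doc, not values this commit requires:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class MultiQueryDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder URL/credentials; note the flag from the docs change above.
        String url = "jdbc:mysql://localhost:3306/swtest?allowMultiQueries=true";
        try (Connection conn = DriverManager.getConnection(url, "root", "root@1234");
             Statement stmt = conn.createStatement()) {
            // Two DDL statements in one round trip; Connector/J rejects this
            // string unless allowMultiQueries=true is set on the URL.
            stmt.execute("CREATE TABLE IF NOT EXISTS demo_20230317 (id INT); "
                    + "CREATE TABLE IF NOT EXISTS demo_20230318 (id INT);");
        }
    }
}
```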
diff --git a/docs/en/setup/backend/backend-storage.md b/docs/en/setup/backend/backend-storage.md
index 5abe9faf3e..9ae0361fce 100644
--- a/docs/en/setup/backend/backend-storage.md
+++ b/docs/en/setup/backend/backend-storage.md
@@ -260,7 +260,7 @@ storage:
   selector: ${SW_STORAGE:mysql}
   mysql:
     properties:
-      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:3306/swtest?rewriteBatchedStatements=true"}
+      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:3306/swtest?rewriteBatchedStatements=true&allowMultiQueries=true"}
       dataSource.user: ${SW_DATA_SOURCE_USER:root}
       dataSource.password: ${SW_DATA_SOURCE_PASSWORD:root@1234}
       dataSource.cachePrepStmts: ${SW_DATA_SOURCE_CACHE_PREP_STMTS:true}
@@ -275,60 +275,8 @@ All connection-related settings, including URL link, username, and password, are
 Only part of the settings is listed here. See the [HikariCP](https://github.com/brettwooldridge/HikariCP) connection pool document for full settings.
 To understand the function of the parameter `rewriteBatchedStatements=true` in MySQL, see the [MySQL official document](https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-connp-props-performance-extensions.html#cj-conn-prop_rewriteBatchedStatements).
 
-## MySQL-Sharding
-MySQL-Sharding plugin provides the MySQL database sharding and table sharding, this feature
-leverage [Shardingsphere-Proxy](https://shardingsphere.apache.org/document/current/en/overview/#shardingsphere-proxy)
-to manage the JDBC between OAP and multi-database instances, and according to the sharding rules do routing to the database and table sharding.
-
-Tested Shardingsphere-Proxy 5.3.1 version, and MySQL Client driver 8.0.13 version is currently available.
-Activate MySQL and Shardingsphere-Proxy as storage, and set storage provider to **mysql-sharding**.
-
-**NOTE:** MySQL driver is NOT allowed in Apache official distribution and source codes.
-Please download the MySQL driver on your own. Copy the connection driver jar to `oap-libs`.
-
-```yaml
-storage:
-  selector: ${SW_STORAGE:mysql-sharding}
-  mysql-sharding:
-    properties:
-      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:13307/swtest?rewriteBatchedStatements=true"}
-      dataSource.user: ${SW_DATA_SOURCE_USER:root}
-      dataSource.password: ${SW_DATA_SOURCE_PASSWORD:root}
-    metadataQueryMaxSize: ${SW_STORAGE_MYSQL_QUERY_MAX_SIZE:5000}
-    maxSizeOfBatchSql: ${SW_STORAGE_MAX_SIZE_OF_BATCH_SQL:2000}
-    asyncBatchPersistentPoolSize: ${SW_STORAGE_ASYNC_BATCH_PERSISTENT_POOL_SIZE:4}
-    # The dataSources are configured in ShardingSphere-Proxy config-sharding.yaml
-    # The dataSource name should include the prefix "ds_" and separated by ","
-    dataSources: ${SW_JDBC_SHARDING_DATA_SOURCES:ds_0,ds_1}
-
-```
-
-## TiDB
-Tested TiDB Server 4.0.8 version, and MySQL Client driver 8.0.13 version is currently available.
-Activate TiDB as storage, and set storage provider to **tidb**.
-
-```yaml
-storage:
-  selector: ${SW_STORAGE:tidb}
-  tidb:
-    properties:
-      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:4000/swtest?rewriteBatchedStatements=true"}
-      dataSource.user: ${SW_DATA_SOURCE_USER:root}
-      dataSource.password: ${SW_DATA_SOURCE_PASSWORD:""}
-      dataSource.cachePrepStmts: ${SW_DATA_SOURCE_CACHE_PREP_STMTS:true}
-      dataSource.prepStmtCacheSize: ${SW_DATA_SOURCE_PREP_STMT_CACHE_SQL_SIZE:250}
-      dataSource.prepStmtCacheSqlLimit: ${SW_DATA_SOURCE_PREP_STMT_CACHE_SQL_LIMIT:2048}
-      dataSource.useServerPrepStmts: ${SW_DATA_SOURCE_USE_SERVER_PREP_STMTS:true}
-      dataSource.useAffectedRows: ${SW_DATA_SOURCE_USE_AFFECTED_ROWS:true}
-    metadataQueryMaxSize: ${SW_STORAGE_MYSQL_QUERY_MAX_SIZE:5000}
-    maxSizeOfArrayColumn: ${SW_STORAGE_MAX_SIZE_OF_ARRAY_COLUMN:20}
-    numOfSearchableValuesPerTag: ${SW_STORAGE_NUM_OF_SEARCHABLE_VALUES_PER_TAG:2}
-    maxSizeOfBatchSql: ${SW_STORAGE_MAX_SIZE_OF_BATCH_SQL:2000}
-    asyncBatchPersistentPoolSize: ${SW_STORAGE_ASYNC_BATCH_PERSISTENT_POOL_SIZE:4}
-```
-All connection-related settings, including URL link, username, and password are found in `application.yml`.
-For details on settings, refer to the configuration of *MySQL* above.
-To understand the function of the parameter `rewriteBatchedStatements=true` in TiDB, see the document of [TiDB best practices](https://docs.pingcap.com/tidb/stable/java-app-best-practices#use-batch-api).
+In theory, any other database compatible with the MySQL protocol, such as TiDB, should be able to
+use this storage plugin. Compose the JDBC URL according to that database's documentation.
 
 ## PostgreSQL
 PostgreSQL JDBC driver uses version 42.3.2. It supports PostgreSQL 8.2 or newer.
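
Editor's note: for the MySQL-protocol-compatible case the new doc text mentions, a hedged HikariCP sketch of composing such a JDBC URL. The TiDB port 4000 and empty root password mirror the removed TiDB section and are assumptions, not values this commit ships:

```java
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

public class TiDBViaMySQLPlugin {
    public static void main(String[] args) {
        HikariConfig config = new HikariConfig();
        // TiDB speaks the MySQL protocol, so the MySQL JDBC URL shape applies.
        config.setJdbcUrl(
            "jdbc:mysql://localhost:4000/swtest?rewriteBatchedStatements=true&allowMultiQueries=true");
        config.setUsername("root");
        config.setPassword("");
        try (HikariDataSource dataSource = new HikariDataSource(config)) {
            // Hand the pool to the JDBC storage plugin / run queries as usual.
        }
    }
}
```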
diff --git a/docs/en/setup/backend/configuration-vocabulary.md b/docs/en/setup/backend/configuration-vocabulary.md
index 68608ce79c..be3a1cf0fb 100644
--- a/docs/en/setup/backend/configuration-vocabulary.md
+++ b/docs/en/setup/backend/configuration-vocabulary.md
@@ -129,12 +129,6 @@ The Configuration Vocabulary lists all available configurations provided by `app
 | -                       | -             | metadataQueryMaxSize                                                                                                                                                     | The maximum size of metadata per query.                                                                                                                                                                                                                                               [...]
 | -                       | -             | maxSizeOfBatchSql                                                                                                                                                        | The maximum size of batch size of SQL execution                                                                                                                                                                                                                                       [...]
 | -                       | -             | asyncBatchPersistentPoolSize                                                                                                                                             | async flush data into database thread size                                                                                                                                                                                                                                            [...]
-| -                       | mysql-sharding | -                                                                                                                                                                        | Sharding-Proxy for MySQL properties. The MySQL JDBC Driver is not in the dist. Please copy it into the oap-lib folder manually.                                                                                                                                                      [...]
-| -                       | -             | properties                                                                                                                                                               | Hikari connection pool configurations.                                                                                                                                                                                                                                                [...]
-| -                       | -             | metadataQueryMaxSize                                                                                                                                                     | The maximum size of metadata per query.                                                                                                                                                                                                                                               [...]
-| -                       | -             | maxSizeOfBatchSql                                                                                                                                                        | The maximum size of batch size of SQL execution                                                                                                                                                                                                                                       [...]
-| -                       | -             | asyncBatchPersistentPoolSize                                                                                                                                             | async flush data into database thread size                                                                                                                                                                                                                                            [...]
-| -                       | -             | dataSources                                                                                                                                             |  The dataSources are configured in ShardingSphere-Proxy config-sharding.yaml.The dataSource name should include the prefix "ds_" and separated by "," and start from ds_0                                                                                                                              [...]
 | -                       | postgresql    | -                                                                                                                                                                        | PostgreSQL storage.                                                                                                                                                                                                                                                                   [...]
 | -                       | -             | properties                                                                                                                                                               | Hikari connection pool configurations.                                                                                                                                                                                                                                                [...]
 | -                       | -             | metadataQueryMaxSize                                                                                                                                                     | The maximum size of metadata per query.                                                                                                                                                                                                                                               [...]
diff --git a/oap-server-bom/pom.xml b/oap-server-bom/pom.xml
index 06152588e6..f038479d31 100644
--- a/oap-server-bom/pom.xml
+++ b/oap-server-bom/pom.xml
@@ -553,6 +553,12 @@
                 <version>${testcontainers.version}</version>
                 <scope>test</scope>
             </dependency>
+            <dependency>
+                <groupId>org.testcontainers</groupId>
+                <artifactId>postgresql</artifactId>
+                <version>${testcontainers.version}</version>
+                <scope>test</scope>
+            </dependency>
             <dependency>
                 <groupId>org.testcontainers</groupId>
                 <artifactId>junit-jupiter</artifactId>
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/Const.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/Const.java
index d61ead60d2..8a51c4316e 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/Const.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/Const.java
@@ -27,6 +27,7 @@ public class Const {
     public static final String RELATION_ID_CONNECTOR = "-";
     public static final String RELATION_ID_PARSER_SPLIT = "\\-";
     public static final String LINE = "-";
+    public static final String UNDERSCORE = "_";
     public static final String COMMA = ",";
     public static final String SPACE = " ";
     public static final String KEY_VALUE_SPLIT = ",";
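
Editor's note: the new `UNDERSCORE` constant is presumably used when composing day-suffixed physical table names; a hypothetical usage sketch (the real call sites are in the JDBC storage plugin, not this hunk):

```java
import org.apache.skywalking.oap.server.core.Const;

public class UnderscoreUsageSketch {
    public static void main(String[] args) {
        // Hypothetical: join a logical table name and a day time bucket.
        String physicalTable = String.join(Const.UNDERSCORE, "service_traffic", "20230317");
        System.out.println(physicalTable); // service_traffic_20230317
    }
}
```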
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/endpoint/EndpointTraffic.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/endpoint/EndpointTraffic.java
index 1517a115a5..a8c72c9363 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/endpoint/EndpointTraffic.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/endpoint/EndpointTraffic.java
@@ -18,6 +18,10 @@
 
 package org.apache.skywalking.oap.server.core.analysis.manual.endpoint;
 
+import com.google.common.base.Strings;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.Setter;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
 import org.apache.skywalking.oap.server.core.analysis.MetricsExtension;
@@ -26,27 +30,18 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
 import org.apache.skywalking.oap.server.core.storage.annotation.ElasticSearch;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
-import com.google.common.base.Strings;
-import lombok.EqualsAndHashCode;
-import lombok.Getter;
-import lombok.Setter;
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ID;
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.TIME_BUCKET;
 
 @Stream(name = EndpointTraffic.INDEX_NAME, scopeId = DefaultScopeDefine.ENDPOINT,
     builder = EndpointTraffic.Builder.class, processor = MetricsStreamProcessor.class)
 @MetricsExtension(supportDownSampling = false, supportUpdate = false)
 @EqualsAndHashCode
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_MIN_RANGE_SHARDING_ALGORITHM, tableShardingColumn = TIME_BUCKET, dataSourceShardingColumn = ID)
 public class EndpointTraffic extends Metrics {
 
     public static final String INDEX_NAME = "endpoint_traffic";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/instance/InstanceTraffic.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/instance/InstanceTraffic.java
index 1bafe6d835..24f3bb0d62 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/instance/InstanceTraffic.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/instance/InstanceTraffic.java
@@ -18,6 +18,11 @@
 
 package org.apache.skywalking.oap.server.core.analysis.manual.instance;
 
+import com.google.gson.Gson;
+import com.google.gson.JsonObject;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.Setter;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
 import org.apache.skywalking.oap.server.core.analysis.MetricsExtension;
@@ -25,21 +30,15 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
 import org.apache.skywalking.oap.server.core.storage.annotation.ElasticSearch;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
-import com.google.gson.Gson;
-import com.google.gson.JsonObject;
-import lombok.EqualsAndHashCode;
-import lombok.Getter;
-import lombok.Setter;
+
 import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.SERVICE_INSTANCE;
 
 @Stream(name = InstanceTraffic.INDEX_NAME, scopeId = SERVICE_INSTANCE,
@@ -49,7 +48,6 @@ import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.SE
     "serviceId",
     "name"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class InstanceTraffic extends Metrics {
     public static final String INDEX_NAME = "instance_traffic";
     public static final String SERVICE_ID = "service_id";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/log/LogRecord.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/log/LogRecord.java
index db28356a65..6715948e10 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/log/LogRecord.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/log/LogRecord.java
@@ -22,7 +22,6 @@ import lombok.Setter;
 import org.apache.skywalking.oap.server.core.analysis.Stream;
 import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@@ -31,13 +30,11 @@ import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 
-import static org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord.SERVICE_ID;
 import static org.apache.skywalking.oap.server.core.analysis.record.Record.TIME_BUCKET;
 
 @SuperDataset
 @Stream(name = LogRecord.INDEX_NAME, scopeId = DefaultScopeDefine.LOG, builder = LogRecord.Builder.class, processor = RecordStreamProcessor.class)
 @SQLDatabase.ExtraColumn4AdditionalEntity(additionalTable = AbstractLogRecord.ADDITIONAL_TAG_TABLE, parentColumn = TIME_BUCKET)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_SEC_RANGE_SHARDING_ALGORITHM, dataSourceShardingColumn = SERVICE_ID, tableShardingColumn = TIME_BUCKET)
 @BanyanDB.TimestampColumn(AbstractLogRecord.TIMESTAMP)
 public class LogRecord extends AbstractLogRecord {
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/networkalias/NetworkAddressAlias.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/networkalias/NetworkAddressAlias.java
index a4d621f8c4..a0a6db0f0c 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/networkalias/NetworkAddressAlias.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/networkalias/NetworkAddressAlias.java
@@ -28,11 +28,9 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -46,7 +44,6 @@ import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.NE
 @EqualsAndHashCode(of = {
     "address"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class NetworkAddressAlias extends Metrics {
     public static final String INDEX_NAME = "network_address_alias";
     public static final String ADDRESS = "address";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/process/ProcessTraffic.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/process/ProcessTraffic.java
index 878563c605..03fb399caf 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/process/ProcessTraffic.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/process/ProcessTraffic.java
@@ -31,11 +31,9 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -52,7 +50,6 @@ import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.PR
     "instanceId",
     "name",
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 @BanyanDB.StoreIDAsTag
 public class ProcessTraffic extends Metrics {
     public static final String INDEX_NAME = "process_traffic";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/process/ServiceLabelRecord.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/process/ServiceLabelRecord.java
index d52ac8cf40..95e6d4e307 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/process/ServiceLabelRecord.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/process/ServiceLabelRecord.java
@@ -25,11 +25,9 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -50,7 +48,6 @@ import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.SE
         "serviceId",
         "label"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class ServiceLabelRecord extends Metrics {
 
     public static final String INDEX_NAME = "service_label";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/endpoint/EndpointRelationServerSideMetrics.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/endpoint/EndpointRelationServerSideMetrics.java
index 71e94ebf08..b5420c3677 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/endpoint/EndpointRelationServerSideMetrics.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/endpoint/EndpointRelationServerSideMetrics.java
@@ -27,25 +27,19 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ENTITY_ID;
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.TIME_BUCKET;
-
 @Stream(name = EndpointRelationServerSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.ENDPOINT_RELATION,
     builder = EndpointRelationServerSideMetrics.Builder.class, processor = MetricsStreamProcessor.class)
 @MetricsExtension(supportDownSampling = true, supportUpdate = false, timeRelativeID = true)
 @EqualsAndHashCode(of = {
     "entityId"
 }, callSuper = true)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_BUCKET_SHARDING_ALGORITHM, tableShardingColumn = TIME_BUCKET, dataSourceShardingColumn = ENTITY_ID)
 public class EndpointRelationServerSideMetrics extends Metrics {
 
     public static final String INDEX_NAME = "endpoint_relation_server_side";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/instance/ServiceInstanceRelationClientSideMetrics.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/instance/ServiceInstanceRelationClientSideMetrics.java
index e72d3414ef..39a21ee9fb 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/instance/ServiceInstanceRelationClientSideMetrics.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/instance/ServiceInstanceRelationClientSideMetrics.java
@@ -27,25 +27,19 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ENTITY_ID;
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.TIME_BUCKET;
-
 @Stream(name = ServiceInstanceRelationClientSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.SERVICE_INSTANCE_RELATION,
     builder = ServiceInstanceRelationClientSideMetrics.Builder.class, processor = MetricsStreamProcessor.class)
 @MetricsExtension(supportDownSampling = true, supportUpdate = false, timeRelativeID = true)
 @EqualsAndHashCode(of = {
     "entityId"
 }, callSuper = true)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_BUCKET_SHARDING_ALGORITHM, tableShardingColumn = TIME_BUCKET, dataSourceShardingColumn = ENTITY_ID)
 public class ServiceInstanceRelationClientSideMetrics extends Metrics {
 
     public static final String INDEX_NAME = "service_instance_relation_client_side";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/instance/ServiceInstanceRelationServerSideMetrics.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/instance/ServiceInstanceRelationServerSideMetrics.java
index b470f1d78c..1a0753df3a 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/instance/ServiceInstanceRelationServerSideMetrics.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/instance/ServiceInstanceRelationServerSideMetrics.java
@@ -27,25 +27,19 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ENTITY_ID;
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.TIME_BUCKET;
-
 @Stream(name = ServiceInstanceRelationServerSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.SERVICE_INSTANCE_RELATION,
     builder = ServiceInstanceRelationServerSideMetrics.Builder.class, processor = MetricsStreamProcessor.class)
 @MetricsExtension(supportDownSampling = true, supportUpdate = false, timeRelativeID = true)
 @EqualsAndHashCode(of = {
     "entityId"
 }, callSuper = true)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_BUCKET_SHARDING_ALGORITHM, tableShardingColumn = TIME_BUCKET, dataSourceShardingColumn = ENTITY_ID)
 public class ServiceInstanceRelationServerSideMetrics extends Metrics {
 
     public static final String INDEX_NAME = "service_instance_relation_server_side";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/process/ProcessRelationClientSideMetrics.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/process/ProcessRelationClientSideMetrics.java
index 79a943ccf8..e4f54d086a 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/process/ProcessRelationClientSideMetrics.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/process/ProcessRelationClientSideMetrics.java
@@ -28,11 +28,9 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -44,7 +42,6 @@ import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
     "entityId",
     "component_id"
 }, callSuper = true)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class ProcessRelationClientSideMetrics extends Metrics {
 
     public static final String INDEX_NAME = "process_relation_client_side";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/process/ProcessRelationServerSideMetrics.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/process/ProcessRelationServerSideMetrics.java
index 09d8d3f7cc..d5dbaa2c28 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/process/ProcessRelationServerSideMetrics.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/process/ProcessRelationServerSideMetrics.java
@@ -28,11 +28,9 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -43,7 +41,6 @@ import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 @EqualsAndHashCode(of = {
     "entityId"
 }, callSuper = true)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class ProcessRelationServerSideMetrics extends Metrics {
 
     public static final String INDEX_NAME = "process_relation_server_side";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/service/ServiceRelationClientSideMetrics.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/service/ServiceRelationClientSideMetrics.java
index 5288d13c98..9632c6c32c 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/service/ServiceRelationClientSideMetrics.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/service/ServiceRelationClientSideMetrics.java
@@ -27,25 +27,19 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
 import org.apache.skywalking.oap.server.core.storage.annotation.ElasticSearch;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ENTITY_ID;
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.TIME_BUCKET;
-
 @Stream(name = ServiceRelationClientSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.SERVICE_RELATION,
     builder = ServiceRelationClientSideMetrics.Builder.class, processor = MetricsStreamProcessor.class)
 @EqualsAndHashCode(of = {
     "entityId"
 }, callSuper = true)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_BUCKET_SHARDING_ALGORITHM, tableShardingColumn = TIME_BUCKET, dataSourceShardingColumn = ENTITY_ID)
 public class ServiceRelationClientSideMetrics extends Metrics {
 
     public static final String INDEX_NAME = "service_relation_client_side";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/service/ServiceRelationServerSideMetrics.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/service/ServiceRelationServerSideMetrics.java
index 8ac2fd2ab5..4940b43ca7 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/service/ServiceRelationServerSideMetrics.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/relation/service/ServiceRelationServerSideMetrics.java
@@ -28,26 +28,20 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
 import org.apache.skywalking.oap.server.core.storage.annotation.ElasticSearch;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ENTITY_ID;
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.TIME_BUCKET;
-
 @Stream(name = ServiceRelationServerSideMetrics.INDEX_NAME, scopeId = DefaultScopeDefine.SERVICE_RELATION,
     builder = ServiceRelationServerSideMetrics.Builder.class, processor = MetricsStreamProcessor.class)
 @MetricsExtension(supportDownSampling = true, supportUpdate = true, timeRelativeID = true)
 @EqualsAndHashCode(of = {
     "entityId"
 }, callSuper = true)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_BUCKET_SHARDING_ALGORITHM, tableShardingColumn = TIME_BUCKET, dataSourceShardingColumn = ENTITY_ID)
 public class ServiceRelationServerSideMetrics extends Metrics {
 
     public static final String INDEX_NAME = "service_relation_server_side";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/searchtag/TagAutocompleteData.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/searchtag/TagAutocompleteData.java
index 372e5aeb84..469b6eaac7 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/searchtag/TagAutocompleteData.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/searchtag/TagAutocompleteData.java
@@ -27,11 +27,9 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -47,7 +45,6 @@ import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
     "tagValue",
     "tagType"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class TagAutocompleteData extends Metrics {
     public static final String INDEX_NAME = "tag_autocomplete";
     public static final String TAG_KEY = "tag_key";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/segment/SegmentRecord.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/segment/SegmentRecord.java
index e123596f25..e33adbaee0 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/segment/SegmentRecord.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/segment/SegmentRecord.java
@@ -18,7 +18,6 @@
 
 package org.apache.skywalking.oap.server.core.analysis.manual.segment;
 
-import java.util.List;
 import lombok.Getter;
 import lombok.Setter;
 import org.apache.skywalking.oap.server.core.analysis.Stream;
@@ -26,24 +25,23 @@ import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
 import org.apache.skywalking.oap.server.core.analysis.record.Record;
 import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
+import org.apache.skywalking.oap.server.core.storage.annotation.ElasticSearch;
 import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
-import org.apache.skywalking.oap.server.core.storage.annotation.ElasticSearch;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 
-import static org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord.SERVICE_ID;
+import java.util.List;
+
 import static org.apache.skywalking.oap.server.core.analysis.record.Record.TIME_BUCKET;
 
 @SuperDataset
 @Stream(name = SegmentRecord.INDEX_NAME, scopeId = DefaultScopeDefine.SEGMENT, builder = SegmentRecord.Builder.class, processor = RecordStreamProcessor.class)
 @SQLDatabase.ExtraColumn4AdditionalEntity(additionalTable = SegmentRecord.ADDITIONAL_TAG_TABLE, parentColumn = TIME_BUCKET)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_SEC_RANGE_SHARDING_ALGORITHM, dataSourceShardingColumn = SERVICE_ID, tableShardingColumn = TIME_BUCKET)
 @BanyanDB.TimestampColumn(SegmentRecord.START_TIME)
 public class SegmentRecord extends Record {
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/service/ServiceTraffic.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/service/ServiceTraffic.java
index af015fbf80..d1404b8809 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/service/ServiceTraffic.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/manual/service/ServiceTraffic.java
@@ -18,6 +18,9 @@
 
 package org.apache.skywalking.oap.server.core.analysis.manual.service;
 
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.Setter;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
 import org.apache.skywalking.oap.server.core.analysis.Layer;
@@ -27,18 +30,14 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
 import org.apache.skywalking.oap.server.core.storage.annotation.ElasticSearch;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
-import lombok.EqualsAndHashCode;
-import lombok.Getter;
-import lombok.Setter;
+
 import static org.apache.logging.log4j.util.Base64Util.encode;
 import static org.apache.skywalking.oap.server.core.Const.DOUBLE_COLONS_SPLIT;
 
@@ -49,7 +48,6 @@ import static org.apache.skywalking.oap.server.core.Const.DOUBLE_COLONS_SPLIT;
     "name",
     "layer"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class ServiceTraffic extends Metrics {
     public static final String INDEX_NAME = "service_traffic";
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/Event.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/Event.java
index 6fecb832a3..59555b8b10 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/Event.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/Event.java
@@ -29,11 +29,9 @@ import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.ScopeDeclaration;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -50,7 +48,6 @@ import static org.apache.skywalking.oap.server.library.util.StringUtil.isNotBlan
     callSuper = false,
     of = "uuid"
 )
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class Event extends Metrics {
 
     public static final String INDEX_NAME = "events";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/Metrics.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/Metrics.java
index 5c0b212e22..79840056a2 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/Metrics.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/metrics/Metrics.java
@@ -23,14 +23,9 @@ import lombok.Getter;
 import lombok.Setter;
 import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
 import org.apache.skywalking.oap.server.core.remote.data.StreamData;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageData;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
-
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ENTITY_ID;
-import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ID;
 
 /**
  * Metrics represents the statistic data, which is analyzed by OAL scripts or hard code. It has the lifecycle controlled by
@@ -39,7 +34,6 @@ import static org.apache.skywalking.oap.server.core.analysis.metrics.Metrics.ID;
 @EqualsAndHashCode(of = {
     "timeBucket"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_RELATIVE_ID_SHARDING_ALGORITHM, tableShardingColumn = ID, dataSourceShardingColumn = ENTITY_ID)
 public abstract class Metrics extends StreamData implements StorageData {
     public static final String ENTITY_ID = "entity_id";
     public static final String ID = "id";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/ManagementStreamProcessor.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/ManagementStreamProcessor.java
index f48157a9c2..0c0e6e508f 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/ManagementStreamProcessor.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/ManagementStreamProcessor.java
@@ -18,9 +18,6 @@
 
 package org.apache.skywalking.oap.server.core.analysis.worker;
 
-import java.lang.reflect.InvocationTargetException;
-import java.util.HashMap;
-import java.util.Map;
 import org.apache.skywalking.oap.server.core.CoreModule;
 import org.apache.skywalking.oap.server.core.UnexpectedException;
 import org.apache.skywalking.oap.server.core.analysis.DownSampling;
@@ -38,6 +35,10 @@ import org.apache.skywalking.oap.server.core.storage.model.ModelCreator;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 import org.apache.skywalking.oap.server.library.module.ModuleDefineHolder;
 
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * ManagementProcessor represents the UI/CLI interactive process. They are management data, whose size is not huge and
  * which is not time-series based.
@@ -77,7 +78,7 @@ public class ManagementStreamProcessor implements StreamProcessor<ManagementData
 
         ModelCreator modelSetter = moduleDefineHolder.find(CoreModule.NAME).provider().getService(ModelCreator.class);
         // Management stream doesn't read data from database during the persistent process. Keep the timeRelativeID == false always.
-        Model model = modelSetter.add(streamClass, stream.scopeId(), new Storage(stream.name(), false, DownSampling.None), false);
+        Model model = modelSetter.add(streamClass, stream.scopeId(), new Storage(stream.name(), false, DownSampling.None));
 
         final ManagementPersistentWorker persistentWorker = new ManagementPersistentWorker(moduleDefineHolder, model, managementDAO);
         workers.put(streamClass, persistentWorker);
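
Every stream processor touched by this commit drops the trailing boolean argument of ModelCreator.add(...). A minimal sketch of the resulting call shape, reusing only the names visible in this hunk (the enclosing registration method is assumed):

    // Register a management model with the simplified ModelCreator.add(...) signature.
    // Management data is not down-sampled, hence DownSampling.None.
    ModelCreator modelSetter = moduleDefineHolder.find(CoreModule.NAME)
                                                 .provider()
                                                 .getService(ModelCreator.class);
    Model model = modelSetter.add(
        streamClass,                                          // the @Stream-annotated class
        stream.scopeId(),                                     // scope declared on @Stream
        new Storage(stream.name(), false, DownSampling.None)  // timeRelativeID stays false here
    );

The same signature change recurs in MetricsStreamProcessor, NoneStreamProcessor, RecordStreamProcessor, and TopNStreamProcessor below.
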
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/MetricsStreamProcessor.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/MetricsStreamProcessor.java
index de4c48a2c7..290b511224 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/MetricsStreamProcessor.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/MetricsStreamProcessor.java
@@ -18,11 +18,6 @@
 
 package org.apache.skywalking.oap.server.core.analysis.worker;
 
-import java.lang.reflect.InvocationTargetException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import lombok.Getter;
 import lombok.Setter;
 import org.apache.skywalking.oap.server.core.CoreModule;
@@ -46,6 +41,12 @@ import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 import org.apache.skywalking.oap.server.core.worker.IWorkerInstanceSetter;
 import org.apache.skywalking.oap.server.library.module.ModuleDefineHolder;
 
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 /**
  * MetricsStreamProcessor represents the entrance and creator of the metrics streaming aggregation workflow.
  *
@@ -165,15 +166,13 @@ public class MetricsStreamProcessor implements StreamProcessor<Metrics> {
         if (supportDownSampling) {
             if (configService.shouldToHour()) {
                 Model model = modelSetter.add(
-                    metricsClass, stream.getScopeId(), new Storage(stream.getName(), timeRelativeID, DownSampling.Hour),
-                    false
+                    metricsClass, stream.getScopeId(), new Storage(stream.getName(), timeRelativeID, DownSampling.Hour)
                 );
                 hourPersistentWorker = downSamplingWorker(moduleDefineHolder, metricsDAO, model, supportUpdate, kind);
             }
             if (configService.shouldToDay()) {
                 Model model = modelSetter.add(
-                    metricsClass, stream.getScopeId(), new Storage(stream.getName(), timeRelativeID, DownSampling.Day),
-                    false
+                    metricsClass, stream.getScopeId(), new Storage(stream.getName(), timeRelativeID, DownSampling.Day)
                 );
                 dayPersistentWorker = downSamplingWorker(moduleDefineHolder, metricsDAO, model, supportUpdate, kind);
             }
@@ -183,8 +182,7 @@ public class MetricsStreamProcessor implements StreamProcessor<Metrics> {
         }
 
         Model model = modelSetter.add(
-            metricsClass, stream.getScopeId(), new Storage(stream.getName(), timeRelativeID, DownSampling.Minute),
-            false
+            metricsClass, stream.getScopeId(), new Storage(stream.getName(), timeRelativeID, DownSampling.Minute)
         );
         MetricsPersistentWorker minutePersistentWorker = minutePersistentWorker(
             moduleDefineHolder, metricsDAO, model, transWorker, supportUpdate, kind);
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/NoneStreamProcessor.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/NoneStreamProcessor.java
index ffb31ded42..0d59ff3b56 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/NoneStreamProcessor.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/NoneStreamProcessor.java
@@ -77,7 +77,7 @@ public class NoneStreamProcessor implements StreamProcessor<NoneStream> {
 
         ModelCreator modelSetter = moduleDefineHolder.find(CoreModule.NAME).provider().getService(ModelCreator.class);
         // None stream doesn't read data from database during the persistent process. Keep the timeRelativeID == false always.
-        Model model = modelSetter.add(streamClass, stream.scopeId(), new Storage(stream.name(), false, DownSampling.Minute), true);
+        Model model = modelSetter.add(streamClass, stream.scopeId(), new Storage(stream.name(), false, DownSampling.Minute));
 
         final NoneStreamPersistentWorker persistentWorker = new NoneStreamPersistentWorker(moduleDefineHolder, model, noneStream);
         workers.put(streamClass, persistentWorker);
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/RecordStreamProcessor.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/RecordStreamProcessor.java
index 3d081a8bb5..a962917e23 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/RecordStreamProcessor.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/RecordStreamProcessor.java
@@ -73,7 +73,7 @@ public class RecordStreamProcessor implements StreamProcessor<Record> {
         ModelCreator modelSetter = moduleDefineHolder.find(CoreModule.NAME).provider().getService(ModelCreator.class);
         // Record stream doesn't read data from database during the persistent process. Keep the timeRelativeID == false always.
         Model model = modelSetter.add(
-            recordClass, stream.scopeId(), new Storage(stream.name(), false, DownSampling.Second), true);
+            recordClass, stream.scopeId(), new Storage(stream.name(), false, DownSampling.Second));
         ExportRecordWorker exportWorker = new ExportRecordWorker(moduleDefineHolder);
         RecordPersistentWorker persistentWorker = new RecordPersistentWorker(moduleDefineHolder, model, recordDAO, exportWorker);
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/TopNStreamProcessor.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/TopNStreamProcessor.java
index dc6efcd5b2..1b4a84e9e2 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/TopNStreamProcessor.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/worker/TopNStreamProcessor.java
@@ -92,7 +92,7 @@ public class TopNStreamProcessor implements StreamProcessor<TopN> {
         ModelCreator modelSetter = moduleDefineHolder.find(CoreModule.NAME).provider().getService(ModelCreator.class);
         // Top N metrics don't read data from the database during the persistent process. Keep the timeRelativeID == false always.
         Model model = modelSetter.add(
-            topNClass, stream.scopeId(), new Storage(stream.name(), false, DownSampling.Second), true);
+            topNClass, stream.scopeId(), new Storage(stream.name(), false, DownSampling.Second));
 
         TopNWorker persistentWorker = new TopNWorker(
             moduleDefineHolder, model, topSize, topNWorkerReportCycle * 60 * 1000L, recordDAO);
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/browser/manual/errorlog/BrowserErrorLogRecord.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/browser/manual/errorlog/BrowserErrorLogRecord.java
index ad24539720..1310a8efc7 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/browser/manual/errorlog/BrowserErrorLogRecord.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/browser/manual/errorlog/BrowserErrorLogRecord.java
@@ -23,22 +23,16 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
 import org.apache.skywalking.oap.server.core.analysis.record.Record;
 import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 
-import static org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord.SERVICE_ID;
-import static org.apache.skywalking.oap.server.core.analysis.record.Record.TIME_BUCKET;
-
 @SuperDataset
 @Stream(name = BrowserErrorLogRecord.INDEX_NAME, scopeId = DefaultScopeDefine.BROWSER_ERROR_LOG, builder = BrowserErrorLogRecord.Builder.class, processor = RecordStreamProcessor.class)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_SEC_RANGE_SHARDING_ALGORITHM, dataSourceShardingColumn = SERVICE_ID, tableShardingColumn = TIME_BUCKET)
 @BanyanDB.TimestampColumn(BrowserErrorLogRecord.TIMESTAMP)
 public class BrowserErrorLogRecord extends Record {
     public static final String INDEX_NAME = "browser_error_log";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/EBPFProfilingQueryService.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/EBPFProfilingQueryService.java
index 46d80e3c79..066a65b4cd 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/EBPFProfilingQueryService.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/EBPFProfilingQueryService.java
@@ -19,16 +19,6 @@
 package org.apache.skywalking.oap.server.core.profiling.ebpf;
 
 import com.google.gson.Gson;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.CoreModuleConfig;
@@ -63,6 +53,17 @@ import org.apache.skywalking.oap.server.library.module.Service;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
 @Slf4j
 @RequiredArgsConstructor
 public class EBPFProfilingQueryService implements Service {
@@ -190,8 +191,11 @@ public class EBPFProfilingQueryService implements Service {
         return new ArrayList<>(tmpMap.values());
     }
 
-    public List<EBPFProfilingSchedule> queryEBPFProfilingSchedules(String taskId) throws IOException {
+    public List<EBPFProfilingSchedule> queryEBPFProfilingSchedules(String taskId) throws Exception {
         final List<EBPFProfilingSchedule> schedules = getScheduleDAO().querySchedules(taskId);
+
+        log.info("schedules: {}", GSON.toJson(schedules));
+
         if (CollectionUtils.isNotEmpty(schedules)) {
             final Model processModel = getProcessModel();
             final List<Metrics> processMetrics = schedules.stream()
@@ -202,6 +206,8 @@ public class EBPFProfilingQueryService implements Service {
                     }).collect(Collectors.toList());
             final List<Metrics> processes = getProcessMetricsDAO().multiGet(processModel, processMetrics);
 
+            log.info("processes: {}", GSON.toJson(processes));
+
             final Map<String, Process> processMap = processes.stream()
                                                                 .map(t -> (ProcessTraffic) t)
                                                                 .collect(Collectors.toMap(m -> m.id().build(), this::convertProcess));
@@ -238,4 +244,4 @@ public class EBPFProfilingQueryService implements Service {
         }
         return process;
     }
-}
\ No newline at end of file
+}
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/storage/EBPFProfilingDataRecord.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/storage/EBPFProfilingDataRecord.java
index d4b8404edf..b40aa23488 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/storage/EBPFProfilingDataRecord.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/storage/EBPFProfilingDataRecord.java
@@ -50,10 +50,10 @@ public class EBPFProfilingDataRecord extends Record {
     public static final String DATA_BINARY = "dump_binary";
     public static final String UPLOAD_TIME = "upload_time";
 
-    @Column(name = TASK_ID, length = 600)
+    @Column(name = TASK_ID)
     @BanyanDB.SeriesID(index = 0)
     private String taskId;
-    @Column(name = SCHEDULE_ID, length = 600)
+    @Column(name = SCHEDULE_ID)
     private String scheduleId;
     @Column(name = STACK_ID_LIST)
     private String stackIdList;
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/storage/EBPFProfilingScheduleRecord.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/storage/EBPFProfilingScheduleRecord.java
index 98042fbcb6..9e9f78b5e6 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/storage/EBPFProfilingScheduleRecord.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/ebpf/storage/EBPFProfilingScheduleRecord.java
@@ -26,11 +26,9 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -52,7 +50,6 @@ import static org.apache.skywalking.oap.server.core.source.DefaultScopeDefine.EB
     "processId",
     "startTime",
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class EBPFProfilingScheduleRecord extends Metrics {
 
     public static final String INDEX_NAME = "ebpf_profiling_schedule";
@@ -62,7 +59,7 @@ public class EBPFProfilingScheduleRecord extends Metrics {
     public static final String END_TIME = "end_time";
     public static final String EBPF_PROFILING_SCHEDULE_ID = "ebpf_profiling_schedule_id";
 
-    @Column(name = TASK_ID, length = 600)
+    @Column(name = TASK_ID)
     private String taskId;
     @Column(name = PROCESS_ID, length = 600)
     private String processId;
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/trace/ProfileThreadSnapshotRecord.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/trace/ProfileThreadSnapshotRecord.java
index 218bfa322f..db8eebfa76 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/trace/ProfileThreadSnapshotRecord.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/profiling/trace/ProfileThreadSnapshotRecord.java
@@ -52,11 +52,11 @@ public class ProfileThreadSnapshotRecord extends Record {
     public static final String STACK_BINARY = "stack_binary";
 
     @Column(name = TASK_ID)
-    @SQLDatabase.QueryUnifiedIndex(withColumns = {SEGMENT_ID})
+    @SQLDatabase.CompositeIndex(withColumns = {SEGMENT_ID})
     private String taskId;
     @Column(name = SEGMENT_ID)
-    @SQLDatabase.QueryUnifiedIndex(withColumns = {SEQUENCE})
-    @SQLDatabase.QueryUnifiedIndex(withColumns = {DUMP_TIME})
+    @SQLDatabase.CompositeIndex(withColumns = {SEQUENCE})
+    @SQLDatabase.CompositeIndex(withColumns = {DUMP_TIME})
     @BanyanDB.SeriesID(index = 0)
     private String segmentId;
     @Column(name = DUMP_TIME)
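
The rename from QueryUnifiedIndex to CompositeIndex is mechanical, and the annotation remains repeatable: stacking several on one field folds them into the CompositeIndices container defined later in this patch. A hypothetical field for illustration (literal column names stand in for the constants used above):

    // Two composite indexes rooted at the same column; javac folds the repeated
    // annotations into a single @SQLDatabase.CompositeIndices automatically.
    @Column(name = "segment_id")
    @SQLDatabase.CompositeIndex(withColumns = {"sequence"})   // index over (segment_id, sequence)
    @SQLDatabase.CompositeIndex(withColumns = {"dump_time"})  // index over (segment_id, dump_time)
    private String segmentId;
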
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/DurationUtils.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/DurationUtils.java
index cc53aa83f1..6af72e0494 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/DurationUtils.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/DurationUtils.java
@@ -18,8 +18,6 @@
 
 package org.apache.skywalking.oap.server.core.query;
 
-import java.util.LinkedList;
-import java.util.List;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.UnexpectedException;
 import org.apache.skywalking.oap.server.core.query.enumeration.Step;
@@ -27,6 +25,9 @@ import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
 
+import java.util.LinkedList;
+import java.util.List;
+
 public enum DurationUtils {
     INSTANCE;
 
@@ -190,5 +191,4 @@ public enum DurationUtils {
         }
         throw new UnexpectedException("Unsupported step " + step.name());
     }
-
 }
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/ProcessTopologyBuilder.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/ProcessTopologyBuilder.java
index cb57a5c01a..498868109e 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/ProcessTopologyBuilder.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/ProcessTopologyBuilder.java
@@ -18,15 +18,6 @@
 
 package org.apache.skywalking.oap.server.core.query;
 
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.CoreModule;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
@@ -44,6 +35,15 @@ import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.core.storage.model.StorageModels;
 import org.apache.skywalking.oap.server.library.module.ModuleManager;
 
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
 @Slf4j
 public class ProcessTopologyBuilder {
     private final IComponentLibraryCatalogService componentLibraryCatalogService;
@@ -68,7 +68,7 @@ public class ProcessTopologyBuilder {
     }
 
     ProcessTopology build(List<Call.CallDetail> clientCalls,
-                          List<Call.CallDetail> serverCalls) throws IOException {
+                          List<Call.CallDetail> serverCalls) throws Exception {
         List<Call> calls = new LinkedList<>();
         HashMap<String, Call> callMap = new HashMap<>();
 
@@ -135,4 +135,4 @@ public class ProcessTopologyBuilder {
         processNode.setReal(!Objects.equals(traffic.getDetectType(), ProcessDetectType.VIRTUAL.value()));
         return processNode;
     }
-}
\ No newline at end of file
+}
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/TopologyQueryService.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/TopologyQueryService.java
index 4ed5796127..69ebfee476 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/TopologyQueryService.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/TopologyQueryService.java
@@ -19,11 +19,6 @@
 package org.apache.skywalking.oap.server.core.query;
 
 import com.google.common.base.Strings;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.CoreModule;
@@ -45,6 +40,12 @@ import org.apache.skywalking.oap.server.library.module.ModuleManager;
 import org.apache.skywalking.oap.server.library.module.Service;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
 @Slf4j
 public class TopologyQueryService implements Service {
     private final ModuleManager moduleManager;
@@ -198,7 +199,7 @@ public class TopologyQueryService implements Service {
         return topology;
     }
 
-    public ProcessTopology getProcessTopology(final String instanceId, final Duration duration) throws IOException {
+    public ProcessTopology getProcessTopology(final String instanceId, final Duration duration) throws Exception {
         final List<Call.CallDetail> clientCalls = getTopologyQueryDAO().loadProcessRelationDetectedAtClientSide(instanceId, duration);
         final List<Call.CallDetail> serverCalls = getTopologyQueryDAO().loadProcessRelationDetectedAtServerSide(instanceId, duration);
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/input/Duration.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/input/Duration.java
index a9d16ba397..ff71638e1b 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/input/Duration.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/input/Duration.java
@@ -18,13 +18,14 @@
 
 package org.apache.skywalking.oap.server.core.query.input;
 
-import java.util.List;
 import lombok.Getter;
 import lombok.Setter;
 import org.apache.skywalking.oap.server.core.query.DurationUtils;
 import org.apache.skywalking.oap.server.core.query.PointOfTime;
 import org.apache.skywalking.oap.server.core.query.enumeration.Step;
 
+import java.util.List;
+
 @Getter
 @Setter
 public class Duration {
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Alarms.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Alarms.java
index cbb1257514..b065d6aa39 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Alarms.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Alarms.java
@@ -18,9 +18,10 @@
 
 package org.apache.skywalking.oap.server.core.query.type;
 
+import lombok.Getter;
+
 import java.util.ArrayList;
 import java.util.List;
-import lombok.Getter;
 
 @Getter
 public class Alarms {
@@ -30,4 +31,8 @@ public class Alarms {
     public Alarms() {
         this.msgs = new ArrayList<>();
     }
+
+    public Alarms(List<AlarmMessage> msgs) {
+        this.msgs = msgs;
+    }
 }
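
The same constructor addition repeats for BrowserErrorLogs, Logs, TraceBrief, and Events below: each query wrapper gains a constructor accepting a pre-built list, so the storage layer can wrap its result in one step instead of mutating a default-constructed instance. A hypothetical usage sketch:

    // Hypothetical DAO fragment; the surrounding query method is assumed.
    List<AlarmMessage> msgs = new ArrayList<>();
    // ... msgs filled from the storage query ...
    return new Alarms(msgs);  // wrap directly, no post-construction mutation
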
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/BrowserErrorLogs.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/BrowserErrorLogs.java
index 04b5912adc..3d0ac0c66f 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/BrowserErrorLogs.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/BrowserErrorLogs.java
@@ -17,11 +17,14 @@
 
 package org.apache.skywalking.oap.server.core.query.type;
 
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+
 import java.util.ArrayList;
 import java.util.List;
-import lombok.Getter;
 
 @Getter
+@RequiredArgsConstructor
 public class BrowserErrorLogs {
     private final List<BrowserErrorLog> logs;
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Logs.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Logs.java
index e9d6759a50..523e0a69fe 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Logs.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Logs.java
@@ -18,12 +18,13 @@
 
 package org.apache.skywalking.oap.server.core.query.type;
 
-import java.util.ArrayList;
-import java.util.List;
 import lombok.Getter;
 import lombok.Setter;
 import lombok.experimental.Accessors;
 
+import java.util.ArrayList;
+import java.util.List;
+
 @Setter
 @Getter
 @Accessors(chain = true)
@@ -34,4 +35,8 @@ public class Logs {
     public Logs() {
         this.logs = new ArrayList<>();
     }
+
+    public Logs(final List<Log> logs) {
+        this.logs = logs;
+    }
 }
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Record.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Record.java
index f9ad565964..36d5cec99c 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Record.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Record.java
@@ -50,4 +50,4 @@ public class Record {
         result.setValue(getValue());
         return result;
     }
-}
\ No newline at end of file
+}
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/TraceBrief.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/TraceBrief.java
index 81598bbdd4..dbbca5154e 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/TraceBrief.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/TraceBrief.java
@@ -18,9 +18,10 @@
 
 package org.apache.skywalking.oap.server.core.query.type;
 
+import lombok.Getter;
+
 import java.util.ArrayList;
 import java.util.List;
-import lombok.Getter;
 
 @Getter
 public class TraceBrief {
@@ -29,4 +30,8 @@ public class TraceBrief {
     public TraceBrief() {
         this.traces = new ArrayList<>();
     }
+
+    public TraceBrief(List<BasicTrace> traces) {
+        this.traces = traces;
+    }
 }
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/event/Events.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/event/Events.java
index a586edcd10..8136efb77a 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/event/Events.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/event/Events.java
@@ -18,11 +18,20 @@
 
 package org.apache.skywalking.oap.server.core.query.type.event;
 
+import lombok.Data;
+
 import java.util.ArrayList;
 import java.util.List;
-import lombok.Data;
 
 @Data
 public class Events {
-    private List<Event> events = new ArrayList<>();
+    private final List<Event> events;
+
+    public Events() {
+        events = new ArrayList<>();
+    }
+
+    public Events(List<Event> events) {
+        this.events = events;
+    }
 }
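
Because the events list becomes final here, Events keeps an explicit no-args constructor for existing fill-after-construction callers alongside the new list-wrapping one. A hypothetical caller exercising both paths:

    // Hypothetical usage of the two construction paths.
    Events empty = new Events();                     // starts with an empty, fillable list
    Events wrapped = new Events(new ArrayList<>());  // wraps a list built elsewhere
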
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/source/ScopeDefaultColumn.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/source/ScopeDefaultColumn.java
index 61566f9f70..3ebef71fe1 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/source/ScopeDefaultColumn.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/source/ScopeDefaultColumn.java
@@ -18,11 +18,12 @@
 
 package org.apache.skywalking.oap.server.core.source;
 
+import lombok.Getter;
+
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
-import lombok.Getter;
 
 /**
  * Define the default columns of source scope. These columns pass down into the persistent entity (OAL metrics entity)
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/IMetricsDAO.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/IMetricsDAO.java
index 1baee65e4c..c923f007a0 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/IMetricsDAO.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/IMetricsDAO.java
@@ -18,15 +18,16 @@
 
 package org.apache.skywalking.oap.server.core.storage;
 
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
 import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
 import org.apache.skywalking.oap.server.library.client.request.UpdateRequest;
 
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
 /**
  * Metrics related DAO.
  */
@@ -39,7 +40,7 @@ public interface IMetricsDAO extends DAO {
      * @return the data of all given IDs. Only includes existing data. The result is not required to keep the same order as the ids list.
      * @throws Exception when an error occurs in the data query.
      */
-    List<Metrics> multiGet(Model model, List<Metrics> metrics) throws IOException;
+    List<Metrics> multiGet(Model model, List<Metrics> metrics) throws Exception;
 
     /**
      * Transfer the given metrics to an executable insert statement.
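
With multiGet widened from IOException to Exception, a JDBC-backed implementation can surface SQLException (and day-based table lookups) without wrapping. A minimal hypothetical implementation sketch; loadById is an illustrative helper, not part of this patch:

    @Override
    public List<Metrics> multiGet(Model model, List<Metrics> metrics) throws Exception {
        final List<Metrics> result = new ArrayList<>(metrics.size());
        for (final Metrics metric : metrics) {
            final Metrics loaded = loadById(model, metric.id().build());  // hypothetical lookup
            if (loaded != null) {
                result.add(loaded);  // only existing data; order need not match the input
            }
        }
        return result;
    }
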
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/ShardingAlgorithm.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/ShardingAlgorithm.java
deleted file mode 100644
index ad655cd0dd..0000000000
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/ShardingAlgorithm.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.skywalking.oap.server.core.storage;
-
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
-
-/**
- * The following algorithms represent different Table Sharding strategies.
- * Cooperate with tableShardingColumn and dataSourceShardingColumn which are defined in {@link SQLDatabase.Sharding}
- *
- * The DataSource sharding strategy are same:
- * If we have {dataSourceList = ds_0...ds_n} and {dataSourceShardingColumn.hashcode() % dataSourceList.size() = 2}
- * then the route target is ds_2
- *
- * The sharding number of the tables is according to TTL, One table per day:
- * {tableName = table_timeSeries}, {timeSeries = currentDate - TTL +1 ... currentDate + 1}
- * For example: if TTL=3, currentDate = 20220907, the sharding tables are:
- * Table_20220905
- * Table_20220906
- * Table_20220907
- * Table_20220908
- */
-public enum ShardingAlgorithm {
-    /**
-     * Wouldn't sharding Table nor DataSource, keep single Table.
-     */
-    NO_SHARDING,
-
-    /**
-     * Use the time_bucket inside the ID column to sharding by day.
-     * The precision of time_bucket could be second, minute, hour and day in the same table.
-     *
-     * For example, the single table `service_metrics`:
-     * ┌────────────────────────┬────────────┐
-     * │         id             │    value   │
-     * ├────────────────────────┼────────────┤
-     * │ 20220905_Service_A     │   300      │
-     * ├────────────────────────┼────────────┤
-     * │ 2022090512_Service_A   │   200      │
-     * ├────────────────────────┼────────────┤
-     * │ 202209051211_Service_A │   100      │
-     * ├────────────────────────┼────────────┤
-     * │ 20220906_Service_A     │   500      │
-     * ├────────────────────────┼────────────┤
-     * │ 2022090612_Service_A   │   300      │
-     * ├────────────────────────┼────────────┤
-     * │ 202209061211_Service_A │   100      │
-     * └────────────────────────┴────────────┘
-     *
-     * The sharding tables will be:
-     * `service_metrics_20220905`
-     * ┌────────────────────────┬────────────┐
-     * │         id             │    value   │
-     * ├────────────────────────┼────────────┤
-     * │ 20220905_Service_A     │   300      │
-     * ├────────────────────────┼────────────┤
-     * │ 2022090512_Service_A   │   200      │
-     * ├────────────────────────┼────────────┤
-     * │ 202209051211_Service_A │   100      │
-     * └────────────────────────┴────────────┘
-     * and `service_metrics_20220906`
-     * ┌────────────────────────┬────────────┐
-     * │         id             │    value   │
-     * ├────────────────────────┼────────────┤
-     * │ 20220906_Service_A     │   500      │
-     * ├────────────────────────┼────────────┤
-     * │ 2022090612_Service_A   │   300      │
-     * ├────────────────────────┼────────────┤
-     * │ 202209061211_Service_A │   100      │
-     * └────────────────────────┴────────────┘
-     *
-     */
-    TIME_RELATIVE_ID_SHARDING_ALGORITHM,
-
-    /**
-     * Use the time_bucket column to sharding by day.
-     * The precision of time_bucket should be `second`.
-     *
-     * For example, the single table `service_records`:
-     * ┌──────────────┬───────────────┬─────────┐
-     * │   Service    │  time_bucket  │  value  │
-     * ├──────────────┼───────────────┼─────────┤
-     * │   Service_A  │20220905121130 │ 300     │
-     * ├──────────────┼───────────────┼─────────┤
-     * │   Service_A  │20220906181233 │ 200     │
-     * └──────────────┴───────────────┴─────────┘
-     * The sharding tables will be:
-     * `service_records_20220905`
-     * ┌──────────────┬───────────────┬─────────┐
-     * │   Service    │  time_bucket  │  value  │
-     * ├──────────────┼───────────────┼─────────┤
-     * │   Service_A  │20220905121130 │ 300     │
-     * └──────────────┴───────────────┴─────────┘
-     * and `service_records_20220906`
-     * ┌──────────────┬───────────────┬─────────┐
-     * │   Service    │  time_bucket  │  value  │
-     * ├──────────────┼───────────────┼─────────┤
-     * │   Service_A  │20220906181233 │ 200     │
-     * └──────────────┴───────────────┴─────────┘
-     */
-    TIME_SEC_RANGE_SHARDING_ALGORITHM,
-
-    /**
-     * Use the time_bucket column to sharding by day.
-     * The precision of time_bucket should be `minute`.
-     *
-     * For example, the single table `endpoint_traffic`:
-     * ┌──────────────┬───────────────┐
-     * │   Endpoint   │  time_bucket  │
-     * ├──────────────┼───────────────┤
-     * │   Endpoint_A │202209051211   │
-     * ├──────────────┼───────────────┤
-     * │   Endpoint_B │202209061812   │
-     * └──────────────┴───────────────┘
-     * The sharding tables will be:
-     * `endpoint_traffic_20220905`
-     * ┌──────────────┬───────────────┐
-     * │   Endpoint   │  time_bucket  │
-     * ├──────────────┼───────────────┤
-     * │   Endpoint_A │202209051211   │
-     * └──────────────┴───────────────┘
-     * and `endpoint_traffic_20220906`
-     * ┌──────────────┬───────────────┐
-     * │   Endpoint   │  time_bucket  │
-     * ├──────────────┼───────────────┤
-     * │   Endpoint_B │202209061812   │
-     * └──────────────┴───────────────┘
-     */
-    TIME_MIN_RANGE_SHARDING_ALGORITHM,
-
-    /**
-     * Use the time_bucket column to sharding by day.
-     * The precision of time_bucket could be `second, minute, hour and day` in the same table.
-     * For example, the single table `service_relation`:
-     * ┌────────────────┬────────────────┐
-     * │   relation     │   time_bucket  │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 20220905       │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 2022090512     │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 202209051211   │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 20220906       │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 2022090612     │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 202209061211   │
-     * └────────────────┴────────────────┘
-     * The sharding tables will be:
-     * `service_relation_20220905`
-     * ┌────────────────┬────────────────┐
-     * │   relation     │   time_bucket  │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 20220905       │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 2022090512     │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 202209051211   │
-     * └────────────────┴────────────────┘
-     * and `endpoint_traffic_20220906`
-     * ┌────────────────┬────────────────┐
-     * │   relation     │   time_bucket  │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 20220906       │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 2022090612     │
-     * ├────────────────┼────────────────┤
-     * │   Service_A_B  │ 202209061211   │
-     * └────────────────┴────────────────┘
-     */
-    TIME_BUCKET_SHARDING_ALGORITHM
-}
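
What replaces the deleted algorithms is the day-based table rolling named in the commit title: one physical table per model per day, so expiring old data becomes dropping whole tables instead of deleting rows. A sketch of the naming scheme implied by the examples in the deleted javadoc (the helper itself is hypothetical; the real logic lives in the JDBC storage module):

    import java.time.LocalDate;
    import java.time.format.DateTimeFormatter;

    // Hypothetical helper: derive the per-day physical table name from a model name.
    final class DayBasedTableName {
        private static final DateTimeFormatter DAY = DateTimeFormatter.ofPattern("yyyyMMdd");

        static String of(String modelName, LocalDate day) {
            return modelName + "_" + day.format(DAY);
        }
    }

    // DayBasedTableName.of("service_records", LocalDate.of(2022, 9, 5))
    //     -> "service_records_20220905", matching the tables shown in the deleted javadoc
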
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/annotation/SQLDatabase.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/annotation/SQLDatabase.java
index 970cee0496..e1fdee2089 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/annotation/SQLDatabase.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/annotation/SQLDatabase.java
@@ -23,7 +23,6 @@ import java.lang.annotation.Repeatable;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 
 /**
  * SQLDatabase annotation is a holder including all annotations for SQL-based RDBMS storage
@@ -32,13 +31,14 @@ import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
  */
 public @interface SQLDatabase {
     /**
-     * QueryIndex defines the unified index is required in the query stage. This works only the storage supports this kind
-     * of index model. Mostly, work for the typical relational database, such as MySQL, TiDB.
+     * {@code CompositeIndex} defines the composite index required in the query stage.
+     * This works only when the storage supports this kind of index model; mostly,
+     * it works for typical relational databases, such as MySQL and TiDB.
      */
     @Target({ElementType.FIELD})
     @Retention(RetentionPolicy.RUNTIME)
-    @Repeatable(MultipleQueryUnifiedIndex.class)
-    @interface QueryUnifiedIndex {
+    @Repeatable(CompositeIndices.class)
+    @interface CompositeIndex {
 
         /**
          * @return list of other columns that should be added into the unified index.
@@ -47,12 +47,12 @@ public @interface SQLDatabase {
     }
 
     /**
-     * The support of the multiple {@link QueryUnifiedIndex}s on one field.
+     * Supports multiple {@link CompositeIndex}s declared on one field.
      */
     @Target({ElementType.FIELD})
     @Retention(RetentionPolicy.RUNTIME)
-    @interface MultipleQueryUnifiedIndex {
-        QueryUnifiedIndex[] value();
+    @interface CompositeIndices {
+        CompositeIndex[] value();
     }
 
     /**
@@ -74,7 +74,7 @@ public @interface SQLDatabase {
      * <p>
      * In H2TraceQueryDAO#queryBasicTraces, when querying tags as a condition from this additional table, the SQL could be built like this:
      * <pre>{@code
-     *             if (!CollectionUtils.isEmpty(tags)) {
+     *         if (!CollectionUtils.isEmpty(tags)) {
      *             for (int i = 0; i < tags.size(); i++) {
      *                 sql.append(" inner join ").append(SegmentRecord.ADDITIONAL_TAG_TABLE).append(" ");
      *                 sql.append(SegmentRecord.ADDITIONAL_TAG_TABLE + i);
@@ -83,7 +83,7 @@ public @interface SQLDatabase {
      *             }
      *         }
      *         ...
-     *             if (CollectionUtils.isNotEmpty(tags)) {
+     *         if (CollectionUtils.isNotEmpty(tags)) {
      *             for (int i = 0; i < tags.size(); i++) {
      *                 final int foundIdx = searchableTagKeys.indexOf(tags.get(i).getKey());
      *                 if (foundIdx > -1) {
@@ -96,11 +96,11 @@ public @interface SQLDatabase {
      *                 }
      *             }
      *         }
-     *         }</pre>
+     * }</pre>
      * <p>
      * <ul>
      * <li>If no tags condition, only query segment table, the SQL should be: select
-     * column1, column2 ... from segment where 1=1 and colunm1=xx ...
+     * column1, column2 ... from segment where 1=1 and column1=xx ...
      *
      * <li> If 1 tag condition, query both segment and segment_tag tables, the SQL should be: select column1, column2 ...
      * from segment inner join segment_tag segment_tag0 on segment.id=segment_tag0.id where 1=1 and column1=xx ... and
@@ -108,7 +108,7 @@ public @interface SQLDatabase {
      *
      * <li> If 2 or more tags condition, query both segment and segment_tag tables, the SQL should be: select column1,
      * column2 ... from segment inner join segment_tag segment_tag0 on segment.id=segment_tag0.id inner join segment_tag
-     * segment_tag1 on segment.id=segment_tag1.id ... where 1=1 and colunm1=xx ... and segment_tag0=tagString0 and
+     * segment_tag1 on segment.id=segment_tag1.id ... where 1=1 and column1=xx ... and segment_tag0=tagString0 and
      * segment_tag1=tagString1 ...
      * </ul>
      */
@@ -140,16 +140,4 @@ public @interface SQLDatabase {
     @interface MultipleExtraColumn4AdditionalEntity {
         ExtraColumn4AdditionalEntity[] value();
     }
-
-    /**
-     * Support add sharding policy for a table model. Can be declared on the superclass and overridden by subclass..
-     * @since 9.3.0
-     */
-    @Target({ElementType.TYPE})
-    @Retention(RetentionPolicy.RUNTIME)
-    @interface Sharding {
-        ShardingAlgorithm shardingAlgorithm();
-        String dataSourceShardingColumn() default "";
-        String tableShardingColumn() default "";
-    }
 }
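
A usage sketch of the renamed annotations, mirroring the test model updated later
in this commit (class and field names are illustrative):

    import org.apache.skywalking.oap.server.core.storage.annotation.Column;
    import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;

    // Illustrative model: repeated @CompositeIndex declarations on one field
    // are collected into the @CompositeIndices container at runtime.
    public class ExampleModel {
        @Column(name = "column1")
        @SQLDatabase.CompositeIndex(withColumns = {"column2"})
        private String column1;

        @Column(name = "column2")
        @SQLDatabase.CompositeIndex(withColumns = {"column1"})
        @SQLDatabase.CompositeIndex(withColumns = {"column"})
        private String column2;
    }
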
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/Model.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/Model.java
index c346800d63..5ceb6294d2 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/Model.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/Model.java
@@ -18,52 +18,37 @@
 
 package org.apache.skywalking.oap.server.core.storage.model;
 
-import java.util.List;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
+import lombok.RequiredArgsConstructor;
 import org.apache.skywalking.oap.server.core.analysis.DownSampling;
+import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
+import org.apache.skywalking.oap.server.core.analysis.record.Record;
+
+import java.util.List;
 
 /**
  * The model definition of a logic entity.
  */
 @Getter
 @EqualsAndHashCode
+@RequiredArgsConstructor
 public class Model {
     private final String name;
     private final List<ModelColumn> columns;
     private final int scopeId;
     private final DownSampling downsampling;
-    private final boolean record;
     private final boolean superDataset;
-    private final boolean isTimeSeries;
     private final Class<?> streamClass;
     private final boolean timeRelativeID;
     private final SQLDatabaseModelExtension sqlDBModelExtension;
     private final BanyanDBModelExtension banyanDBModelExtension;
     private final ElasticSearchModelExtension elasticSearchModelExtension;
 
-    public Model(final String name,
-                 final List<ModelColumn> columns,
-                 final int scopeId,
-                 final DownSampling downsampling,
-                 final boolean record,
-                 final boolean superDataset,
-                 final Class<?> streamClass,
-                 boolean timeRelativeID,
-                 final SQLDatabaseModelExtension sqlDBModelExtension,
-                 final BanyanDBModelExtension banyanDBModelExtension,
-                 final ElasticSearchModelExtension elasticSearchModelExtension) {
-        this.name = name;
-        this.columns = columns;
-        this.scopeId = scopeId;
-        this.downsampling = downsampling;
-        this.isTimeSeries = !DownSampling.None.equals(downsampling);
-        this.record = record;
-        this.superDataset = superDataset;
-        this.streamClass = streamClass;
-        this.timeRelativeID = timeRelativeID;
-        this.sqlDBModelExtension = sqlDBModelExtension;
-        this.banyanDBModelExtension = banyanDBModelExtension;
-        this.elasticSearchModelExtension = elasticSearchModelExtension;
-    }
+    @Getter(lazy = true)
+    private final boolean isMetric = Metrics.class.isAssignableFrom(getStreamClass());
+    @Getter(lazy = true)
+    private final boolean isRecord = Record.class.isAssignableFrom(getStreamClass());
+    @Getter(lazy = true)
+    private final boolean isTimeSeries = !DownSampling.None.equals(getDownsampling());
 }
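
The @Getter(lazy = true) fields above replace the values the removed constructor
computed eagerly; roughly, Lombok expands each into a cached, double-checked
computation along these lines (a simplified sketch, not the exact generated code):

    // Simplified sketch of what @Getter(lazy = true) generates for isTimeSeries:
    // computed once on first access, cached thread-safely afterwards.
    private final java.util.concurrent.atomic.AtomicReference<Boolean> isTimeSeries =
            new java.util.concurrent.atomic.AtomicReference<>();

    public boolean isTimeSeries() {
        Boolean value = isTimeSeries.get();
        if (value == null) {
            synchronized (isTimeSeries) {
                value = isTimeSeries.get();
                if (value == null) {
                    value = !DownSampling.None.equals(getDownsampling());
                    isTimeSeries.set(value);
                }
            }
        }
        return value;
    }
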
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/ModelColumn.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/ModelColumn.java
index 2683b9f93e..bfc5ba3221 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/ModelColumn.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/ModelColumn.java
@@ -18,11 +18,12 @@
 
 package org.apache.skywalking.oap.server.core.storage.model;
 
-import java.lang.reflect.Type;
 import lombok.Getter;
 import lombok.ToString;
 import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
 
+import java.lang.reflect.Type;
+
 @Getter
 @ToString
 public class ModelColumn {
@@ -49,19 +50,19 @@ public class ModelColumn {
      *
      * @since 9.1.0
      */
-    private SQLDatabaseExtension sqlDatabaseExtension;
+    private final SQLDatabaseExtension sqlDatabaseExtension;
     /**
      * Hold configurations especially for ElasticSearch
      *
      * @since 9.1.0
      */
-    private ElasticSearchExtension elasticSearchExtension;
+    private final ElasticSearchExtension elasticSearchExtension;
     /**
      * Hold configurations especially for BanyanDB relevant
      *
      * @since 9.1.0
      */
-    private BanyanDBExtension banyanDBExtension;
+    private final BanyanDBExtension banyanDBExtension;
 
     public ModelColumn(ColumnName columnName,
                        Class<?> type,
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/ModelCreator.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/ModelCreator.java
index 3f75a75401..f8d46e8ad1 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/ModelCreator.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/ModelCreator.java
@@ -31,7 +31,7 @@ public interface ModelCreator extends Service {
      *
      * @return the created new model
      */
-    Model add(Class<?> aClass, int scopeId, Storage storage, boolean record) throws StorageException;
+    Model add(Class<?> aClass, int scopeId, Storage storage) throws StorageException;
 
     void addModelListener(CreatingListener listener) throws StorageException;
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/SQLDatabaseModelExtension.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/SQLDatabaseModelExtension.java
index b7c7313083..b826d7c1ff 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/SQLDatabaseModelExtension.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/SQLDatabaseModelExtension.java
@@ -18,17 +18,15 @@
 
 package org.apache.skywalking.oap.server.core.storage.model;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
 import lombok.AccessLevel;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
 import lombok.RequiredArgsConstructor;
-import lombok.Setter;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * @since 9.1.0
@@ -40,9 +38,6 @@ public class SQLDatabaseModelExtension {
     //exclude the columns from the main table
     private final List<ModelColumn> excludeColumns = new ArrayList<>(5);
 
-    @Setter
-    private Optional<Sharding> sharding = Optional.empty();
-
     public void appendAdditionalTable(String tableName, ModelColumn column) {
         additionalTables.computeIfAbsent(tableName, AdditionalTable::new)
                         .appendColumn(column);
@@ -52,10 +47,6 @@ public class SQLDatabaseModelExtension {
         excludeColumns.add(column);
     }
 
-    public boolean isShardingTable() {
-        return this.sharding.isPresent() && !this.sharding.get().getShardingAlgorithm().equals(ShardingAlgorithm.NO_SHARDING);
-    }
-
     @Getter
     @RequiredArgsConstructor(access = AccessLevel.PRIVATE)
     public static class AdditionalTable {
@@ -73,12 +64,4 @@ public class SQLDatabaseModelExtension {
             columns.add(column);
         }
     }
-
-    @Getter
-    @RequiredArgsConstructor
-    public static class Sharding {
-        private final ShardingAlgorithm shardingAlgorithm;
-        private final String dataSourceShardingColumn;
-        private final String tableShardingColumn;
-    }
 }
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/StorageModels.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/StorageModels.java
index b9df523b5c..c76d820ae1 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/StorageModels.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/model/StorageModels.java
@@ -17,15 +17,8 @@
 
 package org.apache.skywalking.oap.server.core.storage.model;
 
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.skywalking.oap.server.core.analysis.record.Record;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
 import org.apache.skywalking.oap.server.core.storage.StorageException;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
@@ -36,6 +29,14 @@ import org.apache.skywalking.oap.server.core.storage.annotation.Storage;
 import org.apache.skywalking.oap.server.core.storage.annotation.SuperDataset;
 import org.apache.skywalking.oap.server.core.storage.annotation.ValueColumnMetadata;
 
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
 /**
  * StorageModels manages all models detected by the core.
  */
@@ -52,7 +53,7 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
     }
 
     @Override
-    public Model add(Class<?> aClass, int scopeId, Storage storage, boolean record) throws StorageException {
+    public Model add(Class<?> aClass, int scopeId, Storage storage) throws StorageException {
         // Check this scope id is valid.
         DefaultScopeDefine.nameOf(scopeId);
 
@@ -61,7 +62,7 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
         SQLDatabaseModelExtension sqlDBModelExtension = new SQLDatabaseModelExtension();
         BanyanDBModelExtension banyanDBModelExtension = new BanyanDBModelExtension();
         ElasticSearchModelExtension elasticSearchModelExtension = new ElasticSearchModelExtension();
-        retrieval(aClass, storage.getModelName(), modelColumns, scopeId, checker, sqlDBModelExtension, record);
+        retrieval(aClass, storage.getModelName(), modelColumns, scopeId, checker, sqlDBModelExtension);
         // Add extra column for additional entities
         if (aClass.isAnnotationPresent(SQLDatabase.ExtraColumn4AdditionalEntity.class)
             || aClass.isAnnotationPresent(SQLDatabase.MultipleExtraColumn4AdditionalEntity.class)) {
@@ -107,7 +108,6 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
             modelColumns,
             scopeId,
             storage.getDownsampling(),
-            record,
             isSuperDatasetModel(aClass),
             aClass,
             storage.isTimeRelativeID(),
@@ -130,7 +130,7 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
     }
 
     /**
-     * CreatingListener listener could react when {@link #add(Class, int, Storage, boolean)} model happens. Also, the
+     * A CreatingListener could react when {@link ModelCreator#add(Class, int, Storage)} happens. Also, the
      * added models are being notified in this add operation.
      */
     @Override
@@ -149,8 +149,7 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
                            final List<ModelColumn> modelColumns,
                            final int scopeId,
                            ShardingKeyChecker checker,
-                           final SQLDatabaseModelExtension sqlDBModelExtension,
-                           boolean record) {
+                           final SQLDatabaseModelExtension sqlDBModelExtension) {
         if (log.isDebugEnabled()) {
             log.debug("Analysis {} to generate Model.", clazz.getName());
         }
@@ -160,7 +159,7 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
         for (Field field : fields) {
             if (field.isAnnotationPresent(Column.class)) {
                 if (field.isAnnotationPresent(SQLDatabase.AdditionalEntity.class)) {
-                    if (!record) {
+                    if (!Record.class.isAssignableFrom(clazz)) {
                         throw new IllegalStateException(
                             "Model [" + modelName + "] is not a Record, @SQLDatabase.AdditionalEntity only supports Record.");
                     }
@@ -173,14 +172,14 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
 
                 // SQL Database extension
                 SQLDatabaseExtension sqlDatabaseExtension = new SQLDatabaseExtension();
-                List<SQLDatabase.QueryUnifiedIndex> indexDefinitions = new ArrayList<>();
-                if (field.isAnnotationPresent(SQLDatabase.QueryUnifiedIndex.class)) {
-                    indexDefinitions.add(field.getAnnotation(SQLDatabase.QueryUnifiedIndex.class));
+                List<SQLDatabase.CompositeIndex> indexDefinitions = new ArrayList<>();
+                if (field.isAnnotationPresent(SQLDatabase.CompositeIndex.class)) {
+                    indexDefinitions.add(field.getAnnotation(SQLDatabase.CompositeIndex.class));
                 }
 
-                if (field.isAnnotationPresent(SQLDatabase.MultipleQueryUnifiedIndex.class)) {
+                if (field.isAnnotationPresent(SQLDatabase.CompositeIndices.class)) {
                     Collections.addAll(
-                        indexDefinitions, field.getAnnotation(SQLDatabase.MultipleQueryUnifiedIndex.class).value());
+                        indexDefinitions, field.getAnnotation(SQLDatabase.CompositeIndices.class).value());
                 }
 
                 indexDefinitions.forEach(indexDefinition -> {
@@ -239,11 +238,12 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
                 }
 
                 if (field.isAnnotationPresent(SQLDatabase.AdditionalEntity.class)) {
-                    String[] tableNames = field.getAnnotation(SQLDatabase.AdditionalEntity.class).additionalTables();
-                    for (final String tableName : tableNames) {
+                    final var additionalEntity = field.getAnnotation(SQLDatabase.AdditionalEntity.class);
+                    final var additionalTableNames = additionalEntity.additionalTables();
+                    for (final var tableName : additionalTableNames) {
                         sqlDBModelExtension.appendAdditionalTable(tableName, modelColumn);
                     }
-                    if (!field.getAnnotation(SQLDatabase.AdditionalEntity.class).reserveOriginalColumns()) {
+                    if (!additionalEntity.reserveOriginalColumns()) {
                         sqlDBModelExtension.appendExcludeColumns(modelColumn);
                     }
                 }
@@ -261,19 +261,8 @@ public class StorageModels implements IModelManager, ModelCreator, ModelManipula
             }
         }
 
-        // For the annotation need to be declared on the superclass, the other annotation should be declared on the subclass.
-        if (!sqlDBModelExtension.getSharding().isPresent() && clazz.isAnnotationPresent(SQLDatabase.Sharding.class)) {
-            SQLDatabase.Sharding sharding = clazz.getAnnotation(SQLDatabase.Sharding.class);
-            sqlDBModelExtension.setSharding(
-                Optional.of(new SQLDatabaseModelExtension.Sharding(
-                    sharding.shardingAlgorithm(),
-                    sharding.dataSourceShardingColumn(),
-                    sharding.tableShardingColumn()
-                )));
-        }
-
         if (Objects.nonNull(clazz.getSuperclass())) {
-            retrieval(clazz.getSuperclass(), modelName, modelColumns, scopeId, checker, sqlDBModelExtension, record);
+            retrieval(clazz.getSuperclass(), modelName, modelColumns, scopeId, checker, sqlDBModelExtension);
         }
     }
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/query/IZipkinQueryDAO.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/query/IZipkinQueryDAO.java
index 0331f28bab..d7fb3b45ba 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/query/IZipkinQueryDAO.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/query/IZipkinQueryDAO.java
@@ -18,14 +18,15 @@
 
 package org.apache.skywalking.oap.server.core.storage.query;
 
-import java.io.IOException;
-import java.util.List;
-import java.util.Set;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.storage.DAO;
 import zipkin2.Span;
 import zipkin2.storage.QueryRequest;
 
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
 public interface IZipkinQueryDAO extends DAO {
     List<String> getServiceNames() throws IOException;
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/ttl/DataTTLKeeperTimer.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/ttl/DataTTLKeeperTimer.java
index ebe5b45e0e..9d7d3627ef 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/ttl/DataTTLKeeperTimer.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/storage/ttl/DataTTLKeeperTimer.java
@@ -18,13 +18,7 @@
 
 package org.apache.skywalking.oap.server.core.storage.ttl;
 
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.skywalking.oap.server.library.util.RunnableWithExceptionProtection;
 import org.apache.skywalking.oap.server.core.CoreModule;
 import org.apache.skywalking.oap.server.core.CoreModuleConfig;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
@@ -37,6 +31,13 @@ import org.apache.skywalking.oap.server.core.storage.model.IModelManager;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.library.module.ModuleManager;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
+import org.apache.skywalking.oap.server.library.util.RunnableWithExceptionProtection;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
 
 /**
  * TTL = Time To Live
@@ -98,7 +99,8 @@ public enum DataTTLKeeperTimer {
             }
             if (log.isDebugEnabled()) {
                 log.debug(
-                    "Is record? {}. RecordDataTTL {}, MetricsDataTTL {}",
+                    "Model {}, is record? {}. RecordDataTTL {}, MetricsDataTTL {}",
+                    model.getName(),
                     model.isRecord(),
                     moduleConfig.getRecordDataTTL(),
                     moduleConfig.getMetricsDataTTL());
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceRelationTraffic.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceRelationTraffic.java
index f54b4e25ce..30aeb407d2 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceRelationTraffic.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceRelationTraffic.java
@@ -27,11 +27,9 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -43,7 +41,6 @@ import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
     "serviceName",
     "remoteServiceName"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class ZipkinServiceRelationTraffic extends Metrics {
 
     public static final String INDEX_NAME = "zipkin_service_relation_traffic";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceSpanTraffic.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceSpanTraffic.java
index 1ca5f3ef09..2681815a1e 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceSpanTraffic.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceSpanTraffic.java
@@ -28,11 +28,9 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -44,7 +42,6 @@ import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
     "serviceName",
     "spanName"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class ZipkinServiceSpanTraffic extends Metrics {
 
     public static final String INDEX_NAME = "zipkin_service_span_traffic";
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceTraffic.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceTraffic.java
index d8b1388406..338cb72fd7 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceTraffic.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinServiceTraffic.java
@@ -28,11 +28,9 @@ import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.analysis.worker.MetricsStreamProcessor;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
-import org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
@@ -43,7 +41,6 @@ import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 @EqualsAndHashCode(of = {
     "serviceName"
 })
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.NO_SHARDING)
 public class ZipkinServiceTraffic extends Metrics {
     public static final String INDEX_NAME = "zipkin_service_traffic";
 
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinSpanRecord.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinSpanRecord.java
index 01e8d54590..2723fd16f8 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinSpanRecord.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/zipkin/ZipkinSpanRecord.java
@@ -28,7 +28,6 @@ import org.apache.skywalking.oap.server.core.analysis.Stream;
 import org.apache.skywalking.oap.server.core.analysis.record.Record;
 import org.apache.skywalking.oap.server.core.analysis.worker.RecordStreamProcessor;
 import org.apache.skywalking.oap.server.core.source.DefaultScopeDefine;
-import org.apache.skywalking.oap.server.core.storage.ShardingAlgorithm;
 import org.apache.skywalking.oap.server.core.storage.StorageID;
 import org.apache.skywalking.oap.server.core.storage.annotation.BanyanDB;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@@ -46,13 +45,11 @@ import zipkin2.Span;
 import java.util.List;
 import java.util.Map;
 
-import static org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord.TRACE_ID;
 import static org.apache.skywalking.oap.server.core.analysis.record.Record.TIME_BUCKET;
 
 @SuperDataset
 @Stream(name = ZipkinSpanRecord.INDEX_NAME, scopeId = DefaultScopeDefine.ZIPKIN_SPAN, builder = ZipkinSpanRecord.Builder.class, processor = RecordStreamProcessor.class)
 @SQLDatabase.ExtraColumn4AdditionalEntity(additionalTable = ZipkinSpanRecord.ADDITIONAL_QUERY_TABLE, parentColumn = TIME_BUCKET)
-@SQLDatabase.Sharding(shardingAlgorithm = ShardingAlgorithm.TIME_SEC_RANGE_SHARDING_ALGORITHM, dataSourceShardingColumn = TRACE_ID, tableShardingColumn = TIME_BUCKET)
 @BanyanDB.TimestampColumn(ZipkinSpanRecord.TIMESTAMP_MILLIS)
 public class ZipkinSpanRecord extends Record {
     private static final Gson GSON = new Gson();
diff --git a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/storage/model/StorageModelsTest.java b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/storage/model/StorageModelsTest.java
index 143638b057..fa4c055aaa 100644
--- a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/storage/model/StorageModelsTest.java
+++ b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/storage/model/StorageModelsTest.java
@@ -65,8 +65,7 @@ public class StorageModelsTest {
     public void testStorageModels() throws StorageException {
         StorageModels models = new StorageModels();
         models.add(TestModel.class, -1,
-                   new Storage("StorageModelsTest", false, DownSampling.Hour),
-                   false
+                   new Storage("StorageModelsTest", false, DownSampling.Hour)
         );
 
         final List<Model> allModules = models.allModels();
@@ -91,12 +90,12 @@ public class StorageModelsTest {
         private String column;
 
         @Column(name = "column1")
-        @SQLDatabase.QueryUnifiedIndex(withColumns = {"column2"})
+        @SQLDatabase.CompositeIndex(withColumns = {"column2"})
         private String column1;
 
         @Column(name = "column2")
-        @SQLDatabase.QueryUnifiedIndex(withColumns = {"column1"})
-        @SQLDatabase.QueryUnifiedIndex(withColumns = {"column"})
+        @SQLDatabase.CompositeIndex(withColumns = {"column1"})
+        @SQLDatabase.CompositeIndex(withColumns = {"column"})
         private String column2;
 
         @Column(name = "column", storageOnly = true)
diff --git a/oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/hikaricp/JDBCClient.java b/oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/hikaricp/JDBCClient.java
new file mode 100644
index 0000000000..49539fb626
--- /dev/null
+++ b/oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/hikaricp/JDBCClient.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.skywalking.oap.server.library.client.jdbc.hikaricp;
+
+import com.zaxxer.hikari.HikariConfig;
+import com.zaxxer.hikari.HikariDataSource;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.skywalking.oap.server.library.client.Client;
+import org.apache.skywalking.oap.server.library.client.healthcheck.DelegatedHealthChecker;
+import org.apache.skywalking.oap.server.library.client.healthcheck.HealthCheckable;
+import org.apache.skywalking.oap.server.library.util.HealthChecker;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Properties;
+import java.util.Set;
+
+/**
+ * JDBC Client uses the HikariCP connection management lib to execute SQL.
+ */
+@Slf4j
+public class JDBCClient implements Client, HealthCheckable {
+    private final HikariConfig hikariConfig;
+    private final DelegatedHealthChecker healthChecker;
+    private HikariDataSource dataSource;
+
+    public JDBCClient(Properties properties) {
+        hikariConfig = new HikariConfig(properties);
+        healthChecker = new DelegatedHealthChecker();
+    }
+
+    @Override
+    public void connect() {
+        dataSource = new HikariDataSource(hikariConfig);
+    }
+
+    @Override
+    public void shutdown() {
+        dataSource.close();
+    }
+
+    /**
+     * The default getConnection returns a connection in auto-commit mode.
+     */
+    public Connection getConnection() throws SQLException {
+        return getConnection(true);
+    }
+
+    public Connection getConnection(boolean autoCommit) throws SQLException {
+        Connection connection = dataSource.getConnection();
+        connection.setAutoCommit(autoCommit);
+        return connection;
+    }
+
+    public void execute(String sql) throws SQLException {
+        if (log.isDebugEnabled()) {
+            log.debug("Executing SQL: {}", sql);
+        }
+
+        try (final var connection = getConnection();
+             final var statement = connection.createStatement()) {
+            statement.execute(sql);
+            statement.closeOnCompletion();
+            healthChecker.health();
+        } catch (SQLException e) {
+            healthChecker.unHealth(e);
+            throw e;
+        }
+    }
+
+    public int executeUpdate(String sql, Object... params) throws SQLException {
+        if (log.isDebugEnabled()) {
+            log.debug("Executing SQL: {}", sql);
+            log.debug("SQL parameters: {}", params);
+        }
+
+        try (final var connection = getConnection();
+             final var statement = connection.prepareStatement(sql)) {
+            setStatementParam(statement, params);
+            int result = statement.executeUpdate();
+            statement.closeOnCompletion();
+            healthChecker.health();
+            return result;
+        } catch (SQLException e) {
+            healthChecker.unHealth(e);
+            throw e;
+        }
+    }
+
+    public <T> T executeQuery(String sql, ResultHandler<T> resultHandler, Object... params) throws SQLException {
+        if (log.isDebugEnabled()) {
+            log.debug("Executing SQL: {}", sql);
+            log.debug("SQL parameters: {}", Arrays.toString(params));
+        }
+        try (final var connection = getConnection();
+             final var statement = connection.prepareStatement(sql)) {
+            setStatementParam(statement, params);
+            try (final var rs = statement.executeQuery()) {
+                healthChecker.health();
+                return resultHandler.handle(rs);
+            }
+        } catch (SQLException e) {
+            healthChecker.unHealth(e);
+            throw e;
+        }
+    }
+
+    private void setStatementParam(PreparedStatement statement, Object[] params) throws SQLException {
+        if (params != null) {
+            for (int i = 0; i < params.length; i++) {
+                Object param = params[i];
+                if (param instanceof String) {
+                    statement.setString(i + 1, (String) param);
+                } else if (param instanceof Integer) {
+                    statement.setInt(i + 1, (int) param);
+                } else if (param instanceof Double) {
+                    statement.setDouble(i + 1, (double) param);
+                } else if (param instanceof Long) {
+                    statement.setLong(i + 1, (long) param);
+                } else {
+                    throw new SQLException("Unsupported data type, type=" + param.getClass().getName());
+                }
+            }
+        }
+    }
+
+    @Override
+    public void registerChecker(HealthChecker healthChecker) {
+        this.healthChecker.register(healthChecker);
+    }
+
+    public boolean indexExists(final String table,
+                               final String index) throws SQLException {
+        try (final var connection = getConnection();
+             final var resultSet = connection.getMetaData().getIndexInfo(null, null, table, false, false)) {
+            while (resultSet.next()) {
+                if (resultSet.getString("INDEX_NAME").equalsIgnoreCase(index)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    public boolean tableExists(final String table) throws SQLException {
+        try (final var conn = getConnection();
+             final var result = conn.getMetaData().getTables(null, null, table, null)) {
+            return result.next();
+        }
+    }
+
+    public Set<String> getTableColumns(final String table) throws SQLException {
+        try (final var conn = getConnection();
+             final var result = conn.getMetaData().getColumns(null, null, table, null)) {
+            final var columns = new HashSet<String>();
+            while (result.next()) {
+                columns.add(result.getString("COLUMN_NAME").toLowerCase());
+            }
+            return columns;
+        }
+    }
+
+    @FunctionalInterface
+    public interface ResultHandler<T> {
+        T handle(ResultSet resultSet) throws SQLException;
+    }
+}
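
Compared with the deleted JDBCHikariCPClient below, which handed a raw ResultSet
back to callers, the new executeQuery closes the connection, statement, and result
set itself and returns whatever the ResultHandler extracts. A usage sketch,
assuming an in-memory H2 database and a hypothetical demo table:

    import java.util.Properties;

    // Hypothetical caller of the new callback-style API; all JDBC resources
    // are closed inside execute/executeUpdate/executeQuery.
    public class JDBCClientDemo {
        public static void main(String[] args) throws Exception {
            final Properties props = new Properties();
            props.setProperty("jdbcUrl", "jdbc:h2:mem:demo;DB_CLOSE_DELAY=-1");
            final JDBCClient client = new JDBCClient(props);
            client.connect();

            client.execute("create table if not exists demo (id int, name varchar(64))");
            client.executeUpdate("insert into demo values (?, ?)", 1, "foo");

            final long total = client.executeQuery(
                "select count(1) as total from demo where id = ?",
                resultSet -> resultSet.next() ? resultSet.getLong("total") : 0L,
                1);
            System.out.println(total); // prints 1

            client.shutdown();
        }
    }
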
diff --git a/oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/hikaricp/JDBCHikariCPClient.java b/oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/hikaricp/JDBCHikariCPClient.java
deleted file mode 100644
index 03bc7416f8..0000000000
--- a/oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/hikaricp/JDBCHikariCPClient.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.skywalking.oap.server.library.client.jdbc.hikaricp;
-
-import com.zaxxer.hikari.HikariConfig;
-import com.zaxxer.hikari.HikariDataSource;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Properties;
-import org.apache.skywalking.oap.server.library.client.Client;
-import org.apache.skywalking.oap.server.library.client.healthcheck.DelegatedHealthChecker;
-import org.apache.skywalking.oap.server.library.client.healthcheck.HealthCheckable;
-import org.apache.skywalking.oap.server.library.client.jdbc.JDBCClientException;
-import org.apache.skywalking.oap.server.library.util.HealthChecker;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * JDBC Client uses HikariCP connection management lib to execute SQL.
- */
-public class JDBCHikariCPClient implements Client, HealthCheckable {
-    private static final Logger LOGGER = LoggerFactory.getLogger(JDBCHikariCPClient.class);
-
-    private final HikariConfig hikariConfig;
-    private final DelegatedHealthChecker healthChecker;
-    private HikariDataSource dataSource;
-
-    public JDBCHikariCPClient(Properties properties) {
-        hikariConfig = new HikariConfig(properties);
-        this.healthChecker = new DelegatedHealthChecker();
-    }
-
-    @Override
-    public void connect() {
-        dataSource = new HikariDataSource(hikariConfig);
-    }
-
-    @Override
-    public void shutdown() {
-        dataSource.close();
-    }
-
-    /**
-     * Default getConnection is set in auto-commit.
-     */
-    public Connection getConnection() throws JDBCClientException {
-        return getConnection(true);
-    }
-
-    public Connection getConnection(boolean autoCommit) throws JDBCClientException {
-        try {
-            Connection connection = dataSource.getConnection();
-            connection.setAutoCommit(autoCommit);
-            return connection;
-        } catch (SQLException e) {
-            throw new JDBCClientException(e.getMessage(), e);
-        }
-    }
-
-    public void execute(Connection connection, String sql) throws JDBCClientException {
-        LOGGER.debug("execute sql: {}", sql);
-        try (Statement statement = connection.createStatement()) {
-            statement.execute(sql);
-            healthChecker.health();
-        } catch (SQLException e) {
-            healthChecker.unHealth(e);
-            throw new JDBCClientException(e.getMessage(), e);
-        }
-    }
-
-    public int executeUpdate(Connection connection, String sql, Object... params) throws JDBCClientException {
-        LOGGER.debug("execute query with result: {}", sql);
-        int result;
-        PreparedStatement statement = null;
-        try {
-            statement = connection.prepareStatement(sql);
-            setStatementParam(statement, params);
-            result = statement.executeUpdate();
-            statement.closeOnCompletion();
-            healthChecker.health();
-        } catch (SQLException e) {
-            if (statement != null) {
-                try {
-                    statement.close();
-                } catch (SQLException e1) {
-                }
-            }
-            healthChecker.unHealth(e);
-            throw new JDBCClientException(e.getMessage(), e);
-        }
-
-        return result;
-    }
-
-    public ResultSet executeQuery(Connection connection, String sql, Object... params) throws JDBCClientException {
-        LOGGER.debug("execute query with result: {}", sql);
-        ResultSet rs;
-        PreparedStatement statement = null;
-        try {
-            statement = connection.prepareStatement(sql);
-            setStatementParam(statement, params);
-            rs = statement.executeQuery();
-            statement.closeOnCompletion();
-            healthChecker.health();
-        } catch (SQLException e) {
-            if (statement != null) {
-                try {
-                    statement.close();
-                } catch (SQLException e1) {
-                }
-            }
-            healthChecker.unHealth(e);
-            throw new JDBCClientException(sql, e);
-        }
-
-        return rs;
-    }
-
-    private void setStatementParam(PreparedStatement statement,
-        Object[] params) throws SQLException, JDBCClientException {
-        if (params != null) {
-            for (int i = 0; i < params.length; i++) {
-                Object param = params[i];
-                if (param instanceof String) {
-                    statement.setString(i + 1, (String) param);
-                } else if (param instanceof Integer) {
-                    statement.setInt(i + 1, (int) param);
-                } else if (param instanceof Double) {
-                    statement.setDouble(i + 1, (double) param);
-                } else if (param instanceof Long) {
-                    statement.setLong(i + 1, (long) param);
-                } else {
-                    throw new JDBCClientException("Unsupported data type, type=" + param.getClass().getName());
-                }
-            }
-        }
-    }
-
-    @Override public void registerChecker(HealthChecker healthChecker) {
-        this.healthChecker.register(healthChecker);
-    }
-}
diff --git a/oap-server/server-query-plugin/promql-plugin/src/main/java/org/apache/skywalking/oap/query/promql/rt/PromQLExprQueryVisitor.java b/oap-server/server-query-plugin/promql-plugin/src/main/java/org/apache/skywalking/oap/query/promql/rt/PromQLExprQueryVisitor.java
index 47d2f8633f..8aaf9db95d 100644
--- a/oap-server/server-query-plugin/promql-plugin/src/main/java/org/apache/skywalking/oap/query/promql/rt/PromQLExprQueryVisitor.java
+++ b/oap-server/server-query-plugin/promql-plugin/src/main/java/org/apache/skywalking/oap/query/promql/rt/PromQLExprQueryVisitor.java
@@ -287,7 +287,7 @@ public class PromQLExprQueryVisitor extends PromQLParserBaseVisitor<ParseResult>
                                                     Optional.ofNullable(selectedRecord.getName()), Optional.empty()
             );
             metricData.setMetric(metricInfo);
-            metricData.setValues(buildMatrixValues(duration, selectedRecord.getValue()));
+            metricData.setValues(buildMatrixValues(duration, String.valueOf(selectedRecord.getValue())));
             matrixResult.getMetricDataList().add(metricData);
         }
     }
@@ -306,7 +306,7 @@ public class PromQLExprQueryVisitor extends PromQLParserBaseVisitor<ParseResult>
                                                     Optional.ofNullable(record.getName())
             );
             metricData.setMetric(metricInfo);
-            metricData.setValues(buildMatrixValues(duration, record.getValue()));
+            metricData.setValues(buildMatrixValues(duration, String.valueOf(record.getValue())));
             matrixResult.getMetricDataList().add(metricData);
         }
     }
diff --git a/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/EBPFProcessProfilingQuery.java b/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/EBPFProcessProfilingQuery.java
index 80ffec376a..f94f3b48f9 100644
--- a/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/EBPFProcessProfilingQuery.java
+++ b/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/EBPFProcessProfilingQuery.java
@@ -66,7 +66,7 @@ public class EBPFProcessProfilingQuery implements GraphQLQueryResolver {
         return getQueryService().queryEBPFProfilingTasks(serviceId, serviceInstanceId, targets);
     }
 
-    public List<EBPFProfilingSchedule> queryEBPFProfilingSchedules(String taskId) throws IOException {
+    public List<EBPFProfilingSchedule> queryEBPFProfilingSchedules(String taskId) throws Exception {
         return getQueryService().queryEBPFProfilingSchedules(taskId);
     }
 
@@ -75,4 +75,4 @@ public class EBPFProcessProfilingQuery implements GraphQLQueryResolver {
                                                                 EBPFProfilingAnalyzeAggregateType aggregateType) throws IOException {
         return getQueryService().getEBPFProfilingAnalyzation(scheduleIdList, timeRanges, aggregateType);
     }
-}
\ No newline at end of file
+}
diff --git a/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/TopNRecordsQuery.java b/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/TopNRecordsQuery.java
index d0ecb2ca3c..8c5945fe81 100644
--- a/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/TopNRecordsQuery.java
+++ b/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/TopNRecordsQuery.java
@@ -19,9 +19,6 @@
 package org.apache.skywalking.oap.query.graphql.resolver;
 
 import graphql.kickstart.tools.GraphQLQueryResolver;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 import org.apache.skywalking.oap.query.graphql.type.TopNRecordsCondition;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
 import org.apache.skywalking.oap.server.core.query.input.TopNCondition;
@@ -29,6 +26,10 @@ import org.apache.skywalking.oap.server.core.query.type.SelectedRecord;
 import org.apache.skywalking.oap.server.core.query.type.TopNRecord;
 import org.apache.skywalking.oap.server.library.module.ModuleManager;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * @since 8.0.0 This query is replaced by {@link MetricsQuery}; all queries have been delegated there.
  */
diff --git a/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/TopologyQuery.java b/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/TopologyQuery.java
index 8534985880..4c4b99aab5 100644
--- a/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/TopologyQuery.java
+++ b/oap-server/server-query-plugin/query-graphql-plugin/src/main/java/org/apache/skywalking/oap/query/graphql/resolver/TopologyQuery.java
@@ -83,7 +83,7 @@ public class TopologyQuery implements GraphQLQueryResolver {
         return getQueryService().getEndpointDependencies(duration, endpointId);
     }
 
-    public ProcessTopology getProcessTopology(final String instanceId, final Duration duration) throws IOException {
+    public ProcessTopology getProcessTopology(final String instanceId, final Duration duration) throws Exception {
         return getQueryService().getProcessTopology(instanceId, duration);
     }
 }
diff --git a/oap-server/server-starter/pom.xml b/oap-server/server-starter/pom.xml
index f439fbfeb5..7a74feb531 100644
--- a/oap-server/server-starter/pom.xml
+++ b/oap-server/server-starter/pom.xml
@@ -192,21 +192,11 @@
             <artifactId>storage-jdbc-hikaricp-plugin</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.skywalking</groupId>
-            <artifactId>storage-tidb-plugin</artifactId>
-            <version>${project.version}</version>
-        </dependency>
         <dependency>
             <groupId>org.apache.skywalking</groupId>
             <artifactId>storage-banyandb-plugin</artifactId>
             <version>${project.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.skywalking</groupId>
-            <artifactId>storage-shardingsphere-plugin</artifactId>
-            <version>${project.version}</version>
-        </dependency>
         <!-- storage module -->
 
         <!-- queryBuild module -->
diff --git a/oap-server/server-starter/src/main/resources/application.yml b/oap-server/server-starter/src/main/resources/application.yml
index c69f7d92b9..9ec171eeea 100644
--- a/oap-server/server-starter/src/main/resources/application.yml
+++ b/oap-server/server-starter/src/main/resources/application.yml
@@ -175,14 +175,14 @@ storage:
     enableCustomRouting: ${SW_STORAGE_ES_ENABLE_CUSTOM_ROUTING:false}
   h2:
     properties:
-      jdbcUrl: ${SW_STORAGE_H2_URL:jdbc:h2:mem:skywalking-oap-db;DB_CLOSE_DELAY=-1}
+      jdbcUrl: ${SW_STORAGE_H2_URL:jdbc:h2:mem:skywalking-oap-db;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=FALSE}
       dataSource.user: ${SW_STORAGE_H2_USER:sa}
     metadataQueryMaxSize: ${SW_STORAGE_H2_QUERY_MAX_SIZE:5000}
     maxSizeOfBatchSql: ${SW_STORAGE_MAX_SIZE_OF_BATCH_SQL:100}
     asyncBatchPersistentPoolSize: ${SW_STORAGE_ASYNC_BATCH_PERSISTENT_POOL_SIZE:1}
   mysql:
     properties:
-      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:3306/swtest?rewriteBatchedStatements=true"}
+      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:3306/swtest?rewriteBatchedStatements=true&allowMultiQueries=true"}
       dataSource.user: ${SW_DATA_SOURCE_USER:root}
       dataSource.password: ${SW_DATA_SOURCE_PASSWORD:root@1234}
       dataSource.cachePrepStmts: ${SW_DATA_SOURCE_CACHE_PREP_STMTS:true}
@@ -192,30 +192,6 @@ storage:
     metadataQueryMaxSize: ${SW_STORAGE_MYSQL_QUERY_MAX_SIZE:5000}
     maxSizeOfBatchSql: ${SW_STORAGE_MAX_SIZE_OF_BATCH_SQL:2000}
     asyncBatchPersistentPoolSize: ${SW_STORAGE_ASYNC_BATCH_PERSISTENT_POOL_SIZE:4}
-  mysql-sharding:
-    properties:
-      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:13307/swtest?rewriteBatchedStatements=true"}
-      dataSource.user: ${SW_DATA_SOURCE_USER:root}
-      dataSource.password: ${SW_DATA_SOURCE_PASSWORD:root}
-    metadataQueryMaxSize: ${SW_STORAGE_MYSQL_QUERY_MAX_SIZE:5000}
-    maxSizeOfBatchSql: ${SW_STORAGE_MAX_SIZE_OF_BATCH_SQL:2000}
-    asyncBatchPersistentPoolSize: ${SW_STORAGE_ASYNC_BATCH_PERSISTENT_POOL_SIZE:4}
-    # The dataSources are configured in ShardingSphere-Proxy config-sharding.yaml.
-    # The dataSource name should include the prefix "ds_" and separated by "," and start from ds_0
-    dataSources: ${SW_JDBC_SHARDING_DATA_SOURCES:ds_0,ds_1}
-  tidb:
-    properties:
-      jdbcUrl: ${SW_JDBC_URL:"jdbc:mysql://localhost:4000/tidbswtest?rewriteBatchedStatements=true"}
-      dataSource.user: ${SW_DATA_SOURCE_USER:root}
-      dataSource.password: ${SW_DATA_SOURCE_PASSWORD:""}
-      dataSource.cachePrepStmts: ${SW_DATA_SOURCE_CACHE_PREP_STMTS:true}
-      dataSource.prepStmtCacheSize: ${SW_DATA_SOURCE_PREP_STMT_CACHE_SQL_SIZE:250}
-      dataSource.prepStmtCacheSqlLimit: ${SW_DATA_SOURCE_PREP_STMT_CACHE_SQL_LIMIT:2048}
-      dataSource.useServerPrepStmts: ${SW_DATA_SOURCE_USE_SERVER_PREP_STMTS:true}
-      dataSource.useAffectedRows: ${SW_DATA_SOURCE_USE_AFFECTED_ROWS:true}
-    metadataQueryMaxSize: ${SW_STORAGE_MYSQL_QUERY_MAX_SIZE:5000}
-    maxSizeOfBatchSql: ${SW_STORAGE_MAX_SIZE_OF_BATCH_SQL:2000}
-    asyncBatchPersistentPoolSize: ${SW_STORAGE_ASYNC_BATCH_PERSISTENT_POOL_SIZE:4}
   postgresql:
     properties:
       jdbcUrl: ${SW_JDBC_URL:"jdbc:postgresql://localhost:5432/skywalking"}
diff --git a/oap-server/server-starter/src/test/java/org/apache/skywalking/oap/server/starter/config/ApplicationConfigLoaderTestCase.java b/oap-server/server-starter/src/test/java/org/apache/skywalking/oap/server/starter/config/ApplicationConfigLoaderTestCase.java
index a540090b85..a381363f9f 100644
--- a/oap-server/server-starter/src/test/java/org/apache/skywalking/oap/server/starter/config/ApplicationConfigLoaderTestCase.java
+++ b/oap-server/server-starter/src/test/java/org/apache/skywalking/oap/server/starter/config/ApplicationConfigLoaderTestCase.java
@@ -47,7 +47,7 @@ public class ApplicationConfigLoaderTestCase {
         assertThat(providerConfig.get("metadataQueryMaxSize")).isEqualTo(5000);
         assertThat(providerConfig.get("properties")).isInstanceOf(Properties.class);
         Properties properties = (Properties) providerConfig.get("properties");
-        assertThat(properties.get("jdbcUrl")).isEqualTo("jdbc:mysql://localhost:3306/swtest?rewriteBatchedStatements=true");
+        assertThat(properties.get("jdbcUrl")).isEqualTo("jdbc:mysql://localhost:3306/swtest?rewriteBatchedStatements=true&allowMultiQueries=true");
     }
 
     @Test
diff --git a/oap-server/server-storage-plugin/pom.xml b/oap-server/server-storage-plugin/pom.xml
index c1fa68a955..80c2cdd5e2 100644
--- a/oap-server/server-storage-plugin/pom.xml
+++ b/oap-server/server-storage-plugin/pom.xml
@@ -30,8 +30,6 @@
     <modules>
         <module>storage-jdbc-hikaricp-plugin</module>
         <module>storage-elasticsearch-plugin</module>
-        <module>storage-tidb-plugin</module>
         <module>storage-banyandb-plugin</module>
-        <module>storage-shardingsphere-plugin</module>
     </modules>
 </project>
diff --git a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/IndexController.java b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/IndexController.java
index ee150027a3..5a5b8cfa9b 100644
--- a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/IndexController.java
+++ b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/IndexController.java
@@ -18,23 +18,22 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base;
 
+import com.google.common.base.Strings;
+import lombok.Getter;
+import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.analysis.FunctionCategory;
-import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
-import org.apache.skywalking.oap.server.core.analysis.record.Record;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.core.storage.model.ModelColumn;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
-import com.google.common.base.Strings;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import lombok.Getter;
-import lombok.Setter;
-import lombok.extern.slf4j.Slf4j;
 
 /**
  * The metrics data generated by OAL or MAL is partitioned in storage by its OAL or MAL function.
@@ -56,8 +55,8 @@ public enum IndexController {
 
     public String getTableName(Model model) {
         if (!logicSharding) {
-            return isMetricModel(model) ? "metrics-all" :
-                (isRecordModel(model) && !model.isSuperDataset() ? "records-all" : model.getName());
+            return model.isMetric() ? "metrics-all" :
+                (model.isRecord() && !model.isSuperDataset() ? "records-all" : model.getName());
         }
         String aggFuncName = FunctionCategory.uniqueFunctionName(model.getStreamClass());
         return StringUtil.isNotBlank(aggFuncName) ? aggFuncName : model.getName();
@@ -68,10 +67,10 @@ public enum IndexController {
      * to avoid conflicts.
      */
     public String generateDocId(Model model, String originalID) {
-        if (!logicSharding && isRecordModel(model) && !model.isSuperDataset()) {
+        if (!logicSharding && model.isRecord() && !model.isSuperDataset()) {
             return this.generateDocId(model.getName(), originalID);
         }
-        if (!isMetricModel(model)) {
+        if (!model.isMetric()) {
             return originalID;
         }
         if (logicSharding && !isFunctionMetric(model)) {
@@ -87,17 +86,6 @@ public enum IndexController {
         return logicTableName + Const.ID_CONNECTOR + originalID;
     }
 
-    /**
-     * Check the mode of the Model definition.
-     */
-    public boolean isMetricModel(Model model) {
-        return Metrics.class.isAssignableFrom(model.getStreamClass());
-    }
-
-    public boolean isRecordModel(Model model) {
-        return Record.class.isAssignableFrom(model.getStreamClass());
-    }
-
     public boolean isFunctionMetric(Model model) {
         return StringUtil.isNotBlank(FunctionCategory.uniqueFunctionName(model.getStreamClass()));
     }
@@ -114,10 +102,10 @@ public enum IndexController {
      * The value of the column is the original table name in other storages.
      */
     public Map<String, Object> appendTableColumn(Model model, Map<String, Object> columns) {
-        if ((!isLogicSharding() && isMetricModel(model)) || (isLogicSharding() && isFunctionMetric(model))) {
+        if ((!isLogicSharding() && model.isMetric()) || (isLogicSharding() && isFunctionMetric(model))) {
             columns.put(LogicIndicesRegister.METRIC_TABLE_NAME, model.getName());
         }
-        if (!logicSharding && isRecordModel(model) && !model.isSuperDataset()) {
+        if (!logicSharding && model.isRecord() && !model.isSuperDataset()) {
             columns.put(LogicIndicesRegister.RECORD_TABLE_NAME, model.getName());
         }
         return columns;
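
The removed isMetricModel/isRecordModel helpers reflected on the stream class at every call site; the commit moves those facts onto Model itself (model.isMetric(), model.isRecord()). A simplified, hypothetical sketch of the idea: compute the flags once at construction instead of repeatedly.

    import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
    import org.apache.skywalking.oap.server.core.analysis.record.Record;

    // Simplified stand-in for the real Model class, for illustration only.
    public class ModelSketch {
        private final boolean metric;
        private final boolean record;

        public ModelSketch(Class<?> streamClass) {
            this.metric = Metrics.class.isAssignableFrom(streamClass);
            this.record = Record.class.isAssignableFrom(streamClass);
        }

        public boolean isMetric() { return metric; }
        public boolean isRecord() { return record; }
    }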
diff --git a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/StorageEsInstaller.java b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/StorageEsInstaller.java
index 5252e3e107..5632d6d46e 100644
--- a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/StorageEsInstaller.java
+++ b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/StorageEsInstaller.java
@@ -18,6 +18,11 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base;
 
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.base.Strings;
+import com.google.gson.Gson;
+import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.library.elasticsearch.response.Index;
 import org.apache.skywalking.library.elasticsearch.response.IndexTemplate;
 import org.apache.skywalking.library.elasticsearch.response.Mappings;
@@ -33,17 +38,13 @@ import org.apache.skywalking.oap.server.library.module.ModuleManager;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
 import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.StorageModuleElasticsearchConfig;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.google.common.base.Strings;
-import com.google.gson.Gson;
+
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import lombok.Setter;
-import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class StorageEsInstaller extends ModelInstaller {
@@ -267,7 +268,7 @@ public class StorageEsInstaller extends ModelInstaller {
     private Map getAnalyzerSetting(Model model) throws StorageException {
         if (config.isLogicSharding() || !model.isTimeSeries()) {
             return getAnalyzerSettingByColumn(model);
-        } else if (IndexController.INSTANCE.isRecordModel(model) && model.isSuperDataset()) {
+        } else if (model.isRecord() && model.isSuperDataset()) {
             //SuperDataset doesn't merge its index; the analyzer follows the column config.
             return getAnalyzerSettingByColumn(model);
         } else {
@@ -338,13 +339,13 @@ public class StorageEsInstaller extends ModelInstaller {
             }
         }
 
-        if ((IndexController.INSTANCE.isMetricModel(model) && !config.isLogicSharding())
+        if ((model.isMetric() && !config.isLogicSharding())
             || (config.isLogicSharding() && IndexController.INSTANCE.isFunctionMetric(model))) {
             Map<String, Object> column = new HashMap<>();
             column.put("type", "keyword");
             properties.put(IndexController.LogicIndicesRegister.METRIC_TABLE_NAME, column);
         }
-        if (!config.isLogicSharding() && IndexController.INSTANCE.isRecordModel(model) && !model.isSuperDataset()) {
+        if (!config.isLogicSharding() && model.isRecord() && !model.isSuperDataset()) {
             Map<String, Object> column = new HashMap<>();
             column.put("type", "keyword");
             properties.put(IndexController.LogicIndicesRegister.RECORD_TABLE_NAME, column);
diff --git a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/AggregationQueryEsDAO.java b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/AggregationQueryEsDAO.java
index e99d6a6417..3fd19217ed 100644
--- a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/AggregationQueryEsDAO.java
+++ b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/AggregationQueryEsDAO.java
@@ -18,9 +18,6 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
 import org.apache.skywalking.library.elasticsearch.requests.search.BoolQueryBuilder;
 import org.apache.skywalking.library.elasticsearch.requests.search.Query;
 import org.apache.skywalking.library.elasticsearch.requests.search.RangeQueryBuilder;
@@ -43,6 +40,10 @@ import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.EsDAO;
 import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.IndexController;
 import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.TimeRangeIndexNameGenerator;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
 public class AggregationQueryEsDAO extends EsDAO implements IAggregationQueryDAO {
 
     public AggregationQueryEsDAO(ElasticSearchClient client) {
diff --git a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/ESEventQueryDAO.java b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/ESEventQueryDAO.java
index 5eaa18275f..f2f6168108 100644
--- a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/ESEventQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/ESEventQueryDAO.java
@@ -18,10 +18,6 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query;
 
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
 import org.apache.skywalking.library.elasticsearch.requests.search.BoolQueryBuilder;
 import org.apache.skywalking.library.elasticsearch.requests.search.Query;
 import org.apache.skywalking.library.elasticsearch.requests.search.Search;
@@ -30,6 +26,7 @@ import org.apache.skywalking.library.elasticsearch.requests.search.Sort;
 import org.apache.skywalking.library.elasticsearch.response.search.SearchHit;
 import org.apache.skywalking.library.elasticsearch.response.search.SearchResponse;
 import org.apache.skywalking.oap.server.core.analysis.Layer;
+import org.apache.skywalking.oap.server.core.analysis.metrics.Event;
 import org.apache.skywalking.oap.server.core.query.PaginationUtils;
 import org.apache.skywalking.oap.server.core.query.enumeration.Order;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
@@ -37,13 +34,17 @@ import org.apache.skywalking.oap.server.core.query.type.event.EventQueryConditio
 import org.apache.skywalking.oap.server.core.query.type.event.EventType;
 import org.apache.skywalking.oap.server.core.query.type.event.Events;
 import org.apache.skywalking.oap.server.core.query.type.event.Source;
-import org.apache.skywalking.oap.server.core.analysis.metrics.Event;
 import org.apache.skywalking.oap.server.core.storage.query.IEventQueryDAO;
 import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
 import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.EsDAO;
 import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.IndexController;
 import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.MatchCNameBuilder;
 
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
 import static com.google.common.base.Strings.isNullOrEmpty;
 import static java.util.Objects.isNull;
 
@@ -69,11 +70,10 @@ public class ESEventQueryDAO extends EsDAO implements IEventQueryDAO {
         final String index =
             IndexController.LogicIndicesRegister.getPhysicalTableName(Event.INDEX_NAME);
         final SearchResponse response = getClient().search(index, searchBuilder.build());
-        final Events events = new Events();
-        events.setEvents(response.getHits().getHits().stream()
-                                 .map(this::parseSearchHit)
-                                 .collect(Collectors.toList()));
-        return events;
+        return new Events(
+            response.getHits().getHits().stream()
+                    .map(this::parseSearchHit)
+                    .collect(Collectors.toList()));
     }
 
     private void buildMustQueryListByCondition(final EventQueryCondition condition,
diff --git a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/RecordsQueryEsDAO.java b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/RecordsQueryEsDAO.java
index dc25175c2c..e2e0be0a62 100644
--- a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/RecordsQueryEsDAO.java
+++ b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/query/RecordsQueryEsDAO.java
@@ -18,13 +18,6 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.skywalking.oap.server.core.query.input.RecordCondition;
-import org.apache.skywalking.oap.server.core.query.type.Record;
 import org.apache.skywalking.library.elasticsearch.requests.search.BoolQueryBuilder;
 import org.apache.skywalking.library.elasticsearch.requests.search.Query;
 import org.apache.skywalking.library.elasticsearch.requests.search.Search;
@@ -35,12 +28,19 @@ import org.apache.skywalking.library.elasticsearch.response.search.SearchRespons
 import org.apache.skywalking.oap.server.core.analysis.topn.TopN;
 import org.apache.skywalking.oap.server.core.query.enumeration.Order;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
+import org.apache.skywalking.oap.server.core.query.input.RecordCondition;
+import org.apache.skywalking.oap.server.core.query.type.Record;
 import org.apache.skywalking.oap.server.core.storage.query.IRecordsQueryDAO;
 import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
 import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.EsDAO;
 import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.IndexController;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
 public class RecordsQueryEsDAO extends EsDAO implements IRecordsQueryDAO {
     public RecordsQueryEsDAO(ElasticSearchClient client) {
         super(client);
@@ -83,7 +83,7 @@ public class RecordsQueryEsDAO extends EsDAO implements IRecordsQueryDAO {
             final String refId = (String) sourceAsMap.get(TopN.TRACE_ID);
             record.setRefId(StringUtil.isEmpty(refId) ? "" : refId);
             record.setId(record.getRefId());
-            record.setValue(sourceAsMap.get(valueColumnName).toString());
+            record.setValue(sourceAsMap.getOrDefault(valueColumnName, "0").toString());
             results.add(record);
         }
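
The getOrDefault change makes the value read null-safe: a document that lacks the value column (for example, one written before the column existed) would previously hit a NullPointerException on .toString(). A tiny, hypothetical illustration:

    import java.util.Map;

    public class NullSafeReadDemo {
        public static void main(String[] args) {
            // A hit that lacks the "value" field.
            Map<String, Object> sourceAsMap = Map.of("trace_id", "abc");
            // Old form: sourceAsMap.get("value").toString() -> NullPointerException.
            String value = sourceAsMap.getOrDefault("value", "0").toString();
            System.out.println(value); // prints 0
        }
    }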
 
diff --git a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/test/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/TimeSeriesUtilsTest.java b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/test/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/TimeSeriesUtilsTest.java
index 97607715f1..931edd6615 100644
--- a/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/test/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/TimeSeriesUtilsTest.java
+++ b/oap-server/server-storage-plugin/storage-elasticsearch-plugin/src/test/java/org/apache/skywalking/oap/server/storage/plugin/elasticsearch/base/TimeSeriesUtilsTest.java
@@ -43,15 +43,15 @@ public class TimeSeriesUtilsTest {
     @BeforeEach
     public void prepare() {
         superDatasetModel = new Model("superDatasetModel", Lists.newArrayList(),
-                                      0, DownSampling.Second, true, true, Record.class, true,
+                                      0, DownSampling.Second, true, Record.class, true,
                                       new SQLDatabaseModelExtension(), new BanyanDBModelExtension(), new ElasticSearchModelExtension()
         );
         normalRecordModel = new Model("normalRecordModel", Lists.newArrayList(),
-                                      0, DownSampling.Second, true, false, Record.class, true,
+                                      0, DownSampling.Second, false, Record.class, true,
                                       new SQLDatabaseModelExtension(), new BanyanDBModelExtension(), new ElasticSearchModelExtension()
         );
         normalMetricsModel = new Model("normalMetricsModel", Lists.newArrayList(),
-                                       0, DownSampling.Minute, false, false, Metrics.class, true,
+                                       0, DownSampling.Minute, false, Metrics.class, true,
                                        new SQLDatabaseModelExtension(), new BanyanDBModelExtension(), new ElasticSearchModelExtension()
         );
         TimeSeriesUtils.setSUPER_DATASET_DAY_STEP(1);
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/pom.xml b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/pom.xml
index e73fb090c1..9bb11f774a 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/pom.xml
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/pom.xml
@@ -48,14 +48,15 @@
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
         </dependency>
-<!--        <dependency>-->
-<!--        <groupId>mysql</groupId>-->
-<!--        <artifactId>mysql-connector-java</artifactId>-->
-<!--        <version>8.0.13</version>-->
-<!--        </dependency>-->
         <dependency>
             <groupId>org.postgresql</groupId>
             <artifactId>postgresql</artifactId>
         </dependency>
+
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>postgresql</artifactId>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 </project>
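
The commented-out MySQL connector block is dropped and a test-scoped Testcontainers PostgreSQL module is added. A hypothetical JUnit 5 test of the kind this enables (it assumes the testcontainers junit-jupiter artifact is also on the test classpath):

    import java.sql.DriverManager;
    import org.junit.jupiter.api.Test;
    import org.testcontainers.containers.PostgreSQLContainer;
    import org.testcontainers.junit.jupiter.Container;
    import org.testcontainers.junit.jupiter.Testcontainers;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    @Testcontainers
    class PostgresStorageIT {
        @Container
        static final PostgreSQLContainer<?> POSTGRES = new PostgreSQLContainer<>("postgres:14");

        @Test
        void connects() throws Exception {
            try (var conn = DriverManager.getConnection(
                    POSTGRES.getJdbcUrl(), POSTGRES.getUsername(), POSTGRES.getPassword())) {
                assertTrue(conn.isValid(1));
            }
        }
    }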
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/BatchSQLExecutor.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/BatchSQLExecutor.java
index 4061ae7405..ea4993d9a9 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/BatchSQLExecutor.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/BatchSQLExecutor.java
@@ -18,44 +18,50 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc;
 
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.UnexpectedException;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
 import org.apache.skywalking.oap.server.library.client.request.PrepareRequest;
 import org.apache.skywalking.oap.server.library.client.request.UpdateRequest;
 
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * A Batch SQL executor.
  */
 @Slf4j
 @RequiredArgsConstructor
 public class BatchSQLExecutor implements InsertRequest, UpdateRequest {
-
+    private final JDBCClient jdbcClient;
     private final List<PrepareRequest> prepareRequests;
 
-    public void invoke(Connection connection, int maxBatchSqlSize) throws SQLException {
+    public void invoke(int maxBatchSqlSize) throws Exception {
         if (log.isDebugEnabled()) {
             log.debug("execute sql batch. sql by key size: {}", prepareRequests.size());
         }
         if (prepareRequests.size() == 0) {
             return;
         }
-        String sql = prepareRequests.get(0).toString();
-        List<PrepareRequest> bulkRequest = new ArrayList<>(maxBatchSqlSize);
-        try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
-            int pendingCount = 0;
-            for (int k = 0; k < prepareRequests.size(); k++) {
-                SQLExecutor sqlExecutor = (SQLExecutor) prepareRequests.get(k);
+        final var sql = prepareRequests.get(0).toString();
+        final var bulkRequest = new ArrayList<PrepareRequest>(maxBatchSqlSize);
+        try (final var connection = jdbcClient.getConnection();
+             final var preparedStatement = connection.prepareStatement(sql)) {
+            var pendingCount = 0;
+            for (final var prepareRequest : prepareRequests) {
+                final var sqlExecutor = (SQLExecutor) prepareRequest;
+                if (log.isDebugEnabled()) {
+                    log.debug("Executing sql: {}", sql);
+                    log.debug("SQL parameters: {}", sqlExecutor.getParam());
+                }
                 sqlExecutor.setParameters(preparedStatement);
                 preparedStatement.addBatch();
                 bulkRequest.add(sqlExecutor);
-                if (k > 0 && k % maxBatchSqlSize == 0) {
+                if (bulkRequest.size() == maxBatchSqlSize) {
                     executeBatch(preparedStatement, maxBatchSqlSize, sql, bulkRequest);
                     bulkRequest.clear();
                     pendingCount = 0;
@@ -74,9 +80,9 @@ public class BatchSQLExecutor implements InsertRequest, UpdateRequest {
                               int pendingCount,
                               String sql,
                               List<PrepareRequest> bulkRequest) throws SQLException {
-        long start = System.currentTimeMillis();
-        final int[] executeBatchResults = preparedStatement.executeBatch();
-        boolean isInsert = bulkRequest.get(0) instanceof InsertRequest;
+        final var start = System.currentTimeMillis();
+        final var executeBatchResults = preparedStatement.executeBatch();
+        final var isInsert = bulkRequest.get(0) instanceof InsertRequest;
         for (int i = 0; i < executeBatchResults.length; i++) {
             if (executeBatchResults[i] == 1 && isInsert) {
                 // Insert successfully.
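
Beyond moving connection ownership into the executor, the rewritten loop changes the flush condition: the old `k > 0 && k % maxBatchSqlSize == 0` test fired only after the (maxBatchSqlSize + 1)-th request had been added, so batches effectively ran one element over the cap, while `bulkRequest.size() == maxBatchSqlSize` flushes exactly at it. A standalone, hypothetical sketch of the size-based pattern:

    import java.util.ArrayList;
    import java.util.List;

    public class FlushDemo {
        static void flush(List<String> batch) {
            System.out.println("flushing " + batch.size() + " requests");
        }

        public static void main(String[] args) {
            int maxBatch = 3;
            var batch = new ArrayList<String>(maxBatch);
            for (var item : List.of("a", "b", "c", "d", "e")) {
                batch.add(item);
                if (batch.size() == maxBatch) { // flush exactly at the cap
                    flush(batch);
                    batch.clear();
                }
            }
            if (!batch.isEmpty()) {
                flush(batch); // trailing partial batch
            }
        }
    }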
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/SQLBuilder.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/SQLBuilder.java
index c345680592..7c60ffe2f4 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/SQLBuilder.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/SQLBuilder.java
@@ -22,17 +22,16 @@ package org.apache.skywalking.oap.server.storage.plugin.jdbc;
  * SQLBuilder
  */
 public class SQLBuilder {
-    private static String LINE_END = System.lineSeparator();
+    private static final String LINE_END = System.lineSeparator();
 
-    private StringBuilder text;
+    private final StringBuilder text;
 
     public SQLBuilder() {
-        this.text = new StringBuilder();
+        text = new StringBuilder();
     }
 
-    public SQLBuilder(String initLine) {
-        this();
-        this.appendLine(initLine);
+    public SQLBuilder(String initText) {
+        text = new StringBuilder(initText);
     }
 
     public SQLBuilder append(String fragment) {
@@ -45,10 +44,6 @@ public class SQLBuilder {
         return this;
     }
 
-    public String toStringInNewLine() {
-        return LINE_END + toString();
-    }
-
     @Override
     public String toString() {
         return text.toString();
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/SQLExecutor.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/SQLExecutor.java
index 266350eb45..f6b2ef037b 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/SQLExecutor.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/SQLExecutor.java
@@ -18,11 +18,6 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc;
 
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
 import lombok.RequiredArgsConstructor;
@@ -31,6 +26,12 @@ import org.apache.skywalking.oap.server.core.storage.SessionCacheCallback;
 import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
 import org.apache.skywalking.oap.server.library.client.request.UpdateRequest;
 
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * A SQL executor.
  */
@@ -39,16 +40,17 @@ import org.apache.skywalking.oap.server.library.client.request.UpdateRequest;
 @Slf4j
 public class SQLExecutor implements InsertRequest, UpdateRequest {
     private final String sql;
+    @Getter
     private final List<Object> param;
     private final SessionCacheCallback callback;
     @Getter
     private List<SQLExecutor> additionalSQLs;
 
     public void invoke(Connection connection) throws SQLException {
-        PreparedStatement preparedStatement = connection.prepareStatement(sql);
+        final var preparedStatement = connection.prepareStatement(sql);
         setParameters(preparedStatement);
         if (log.isDebugEnabled()) {
-            log.debug("execute sql in batch: {}, parameters: {}", sql, param);
+            log.debug("Executing sql in batch: {}, parameters: {}", sql, param);
         }
         preparedStatement.execute();
         if (additionalSQLs != null) {
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/TableMetaInfo.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/TableMetaInfo.java
index 0df86b272c..559d7cf391 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/TableMetaInfo.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/TableMetaInfo.java
@@ -18,16 +18,17 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc;
 
+import org.apache.skywalking.oap.server.core.storage.model.Model;
+
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.skywalking.oap.server.core.storage.model.Model;
 
 /**
  * Since additional table columns may need to be removed from the model columns,
  * SQL storage implementations should get the model from here.
  */
 public class TableMetaInfo {
-    private static Map<String, Model> TABLES = new HashMap<>();
+    private static final Map<String, Model> TABLES = new HashMap<>();
 
     public static void addModel(Model model) {
         // remove exclude columns according to @SQLDatabase.AdditionalEntity
diff --git a/oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/JDBCClientException.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCEntityConverters.java
similarity index 54%
rename from oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/JDBCClientException.java
rename to oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCEntityConverters.java
index d88a85e16f..61084c5ad9 100644
--- a/oap-server/server-library/library-client/src/main/java/org/apache/skywalking/oap/server/library/client/jdbc/JDBCClientException.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCEntityConverters.java
@@ -16,18 +16,27 @@
  *
  */
 
-package org.apache.skywalking.oap.server.library.client.jdbc;
+package org.apache.skywalking.oap.server.storage.plugin.jdbc.common;
 
-import java.io.IOException;
+import lombok.SneakyThrows;
+import org.apache.skywalking.oap.server.core.storage.type.Convert2Entity;
 
-public class JDBCClientException extends IOException {
+import java.sql.ResultSet;
 
-    public JDBCClientException(String message) {
-        super(message);
-    }
+public class JDBCEntityConverters {
+    public static Convert2Entity toEntity(ResultSet resultSet) {
+        return new Convert2Entity() {
+            @Override
+            @SneakyThrows
+            public Object get(String fieldName) {
+                return resultSet.getObject(fieldName);
+            }
 
-    public JDBCClientException(String message, Throwable cause) {
-        super(message, cause);
+            @Override
+            @SneakyThrows
+            public byte[] getBytes(String fieldName) {
+                return resultSet.getBytes(fieldName);
+            }
+        };
     }
 }
-
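
The new JDBCEntityConverters adapts a live java.sql.ResultSet to the core Convert2Entity view that storage builders read from; Lombok's @SneakyThrows rethrows the checked SQLException without declaring it. A hypothetical usage fragment (the table and column names are illustrative, and jdbcClient is the plugin's JDBCClient):

    try (var conn = jdbcClient.getConnection();
         var stmt = conn.createStatement();
         var rs = stmt.executeQuery("SELECT * FROM service_traffic_20230317")) {
        while (rs.next()) {
            var entity = JDBCEntityConverters.toEntity(rs);
            Object name = entity.get("name");             // illustrative column
            byte[] blob = entity.getBytes("data_binary"); // illustrative column
        }
    }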
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCStorageProvider.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCStorageProvider.java
index 8a8a10815c..669b1a7075 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCStorageProvider.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCStorageProvider.java
@@ -43,13 +43,13 @@ import org.apache.skywalking.oap.server.core.storage.query.IEventQueryDAO;
 import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
 import org.apache.skywalking.oap.server.core.storage.query.IMetadataQueryDAO;
 import org.apache.skywalking.oap.server.core.storage.query.IMetricsQueryDAO;
+import org.apache.skywalking.oap.server.core.storage.query.IRecordsQueryDAO;
 import org.apache.skywalking.oap.server.core.storage.query.ISpanAttachedEventQueryDAO;
 import org.apache.skywalking.oap.server.core.storage.query.ITagAutoCompleteQueryDAO;
-import org.apache.skywalking.oap.server.core.storage.query.IRecordsQueryDAO;
 import org.apache.skywalking.oap.server.core.storage.query.ITopologyQueryDAO;
 import org.apache.skywalking.oap.server.core.storage.query.ITraceQueryDAO;
 import org.apache.skywalking.oap.server.core.storage.query.IZipkinQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.module.ModuleDefine;
 import org.apache.skywalking.oap.server.library.module.ModuleProvider;
 import org.apache.skywalking.oap.server.library.module.ModuleStartException;
@@ -84,10 +84,13 @@ import org.apache.skywalking.oap.server.telemetry.api.HealthCheckMetrics;
 import org.apache.skywalking.oap.server.telemetry.api.MetricsCreator;
 import org.apache.skywalking.oap.server.telemetry.api.MetricsTag;
 
+import java.time.Clock;
+
 public abstract class JDBCStorageProvider extends ModuleProvider {
     protected JDBCStorageConfig config;
-    protected JDBCHikariCPClient jdbcClient;
-    protected ModelInstaller modelInstaller;
+    protected JDBCClient jdbcClient;
+    protected JDBCTableInstaller modelInstaller;
+    protected TableHelper tableHelper;
 
     /**
      * Different storage implementations have different ways to create the tables/indices,
@@ -117,8 +120,9 @@ public abstract class JDBCStorageProvider extends ModuleProvider {
 
     @Override
     public void prepare() throws ServiceNotProvidedException, ModuleStartException {
-        jdbcClient = new JDBCHikariCPClient(config.getProperties());
-        modelInstaller = createModelInstaller();
+        jdbcClient = new JDBCClient(config.getProperties());
+        modelInstaller = (JDBCTableInstaller) createModelInstaller();
+        tableHelper = new TableHelper(getManager(), jdbcClient);
 
         this.registerServiceImplementation(
             StorageBuilderFactory.class,
@@ -136,77 +140,77 @@ public abstract class JDBCStorageProvider extends ModuleProvider {
 
         this.registerServiceImplementation(
             INetworkAddressAliasDAO.class,
-            new JDBCNetworkAddressAliasDAO(jdbcClient));
+            new JDBCNetworkAddressAliasDAO(jdbcClient, getManager()));
 
         this.registerServiceImplementation(
             ITopologyQueryDAO.class,
-            new JDBCTopologyQueryDAO(jdbcClient));
+            new JDBCTopologyQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IMetricsQueryDAO.class,
-            new JDBCMetricsQueryDAO(jdbcClient));
+            new JDBCMetricsQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             ITraceQueryDAO.class,
-            new JDBCTraceQueryDAO(getManager(), jdbcClient));
+            new JDBCTraceQueryDAO(getManager(), jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IBrowserLogQueryDAO.class,
-            new JDBCBrowserLogQueryDAO(jdbcClient));
+            new JDBCBrowserLogQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IMetadataQueryDAO.class,
-            new JDBCMetadataQueryDAO(jdbcClient, config.getMetadataQueryMaxSize()));
+            new JDBCMetadataQueryDAO(jdbcClient, config.getMetadataQueryMaxSize(), getManager()));
         this.registerServiceImplementation(
             IAggregationQueryDAO.class,
-            new JDBCAggregationQueryDAO(jdbcClient));
+            new JDBCAggregationQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IAlarmQueryDAO.class,
-            new JDBCAlarmQueryDAO(jdbcClient, getManager()));
+            new JDBCAlarmQueryDAO(jdbcClient, getManager(), tableHelper));
         this.registerServiceImplementation(
             IHistoryDeleteDAO.class,
-            new JDBCHistoryDeleteDAO(jdbcClient));
+            new JDBCHistoryDeleteDAO(jdbcClient, tableHelper, modelInstaller, Clock.systemDefaultZone()));
         this.registerServiceImplementation(
             IRecordsQueryDAO.class,
-            new JDBCRecordsQueryDAO(jdbcClient));
+            new JDBCRecordsQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             ILogQueryDAO.class,
-            new JDBCLogQueryDAO(jdbcClient, getManager()));
+            new JDBCLogQueryDAO(jdbcClient, getManager(), tableHelper));
 
         this.registerServiceImplementation(
             IProfileTaskQueryDAO.class,
-            new JDBCProfileTaskQueryDAO(jdbcClient));
+            new JDBCProfileTaskQueryDAO(jdbcClient, getManager()));
         this.registerServiceImplementation(
             IProfileTaskLogQueryDAO.class,
-            new JDBCProfileTaskLogQueryDAO(jdbcClient));
+            new JDBCProfileTaskLogQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IProfileThreadSnapshotQueryDAO.class,
-            new JDBCProfileThreadSnapshotQueryDAO(jdbcClient));
+            new JDBCProfileThreadSnapshotQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             UITemplateManagementDAO.class,
-            new JDBCUITemplateManagementDAO(jdbcClient));
+            new JDBCUITemplateManagementDAO(jdbcClient, tableHelper));
 
         this.registerServiceImplementation(
             IEventQueryDAO.class,
-            new JDBCEventQueryDAO(jdbcClient));
+            new JDBCEventQueryDAO(jdbcClient, tableHelper));
 
         this.registerServiceImplementation(
             IEBPFProfilingTaskDAO.class,
-            new JDBCEBPFProfilingTaskDAO(jdbcClient));
+            new JDBCEBPFProfilingTaskDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IEBPFProfilingScheduleDAO.class,
-            new JDBCEBPFProfilingScheduleDAO(jdbcClient));
+            new JDBCEBPFProfilingScheduleDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IEBPFProfilingDataDAO.class,
-            new JDBCEBPFProfilingDataDAO(jdbcClient));
+            new JDBCEBPFProfilingDataDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IServiceLabelDAO.class,
-            new JDBCServiceLabelQueryDAO(jdbcClient));
+            new JDBCServiceLabelQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             ITagAutoCompleteQueryDAO.class,
-            new JDBCTagAutoCompleteQueryDAO(jdbcClient));
+            new JDBCTagAutoCompleteQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             IZipkinQueryDAO.class,
-            new JDBCZipkinQueryDAO(jdbcClient));
+            new JDBCZipkinQueryDAO(jdbcClient, tableHelper));
         this.registerServiceImplementation(
             ISpanAttachedEventQueryDAO.class,
-            new JDBCSpanAttachedEventQueryDAO(jdbcClient));
+            new JDBCSpanAttachedEventQueryDAO(jdbcClient, tableHelper));
     }
 
     @Override
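
One detail in the wiring above: JDBCHistoryDeleteDAO now receives a java.time.Clock (Clock.systemDefaultZone() in production). Injecting the clock gives tests a seam to pin "now" when exercising day-based table deletion. A hypothetical illustration:

    import java.time.Clock;
    import java.time.Instant;
    import java.time.ZoneOffset;

    public class ClockSeamDemo {
        public static void main(String[] args) {
            // Pin the clock so a test's notion of "today" is deterministic.
            Clock fixed = Clock.fixed(Instant.parse("2023-03-17T00:00:00Z"), ZoneOffset.UTC);
            System.out.println(fixed.millis()); // always the same millisecond
            // e.g. new JDBCHistoryDeleteDAO(jdbcClient, tableHelper, modelInstaller, fixed);
        }
    }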
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCTableInstaller.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCTableInstaller.java
new file mode 100644
index 0000000000..956aec6585
--- /dev/null
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/JDBCTableInstaller.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.skywalking.oap.server.storage.plugin.jdbc.common;
+
+import com.google.gson.JsonObject;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.skywalking.oap.server.core.analysis.Layer;
+import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
+import org.apache.skywalking.oap.server.core.storage.model.ColumnName;
+import org.apache.skywalking.oap.server.core.storage.model.Model;
+import org.apache.skywalking.oap.server.core.storage.model.ModelColumn;
+import org.apache.skywalking.oap.server.core.storage.model.ModelInstaller;
+import org.apache.skywalking.oap.server.core.storage.type.StorageDataComplexObject;
+import org.apache.skywalking.oap.server.library.client.Client;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.library.module.ModuleManager;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLBuilder;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.TableMetaInfo;
+
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+
+import static java.util.function.Predicate.not;
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toCollection;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toSet;
+
+/**
+ * JDBC table installer that uses standard SQL to create tables, indices, and views.
+ */
+@Slf4j
+public class JDBCTableInstaller extends ModelInstaller {
+    public static final String ID_COLUMN = Metrics.ID;
+    public static final String TABLE_COLUMN = "table_name";
+
+    public JDBCTableInstaller(Client client, ModuleManager moduleManager) {
+        super(client, moduleManager);
+    }
+
+    @Override
+    @SneakyThrows
+    public boolean isExists(Model model) {
+        TableMetaInfo.addModel(model);
+
+        final var table = TableHelper.getLatestTableForWrite(model);
+
+        final var jdbcClient = (JDBCClient) client;
+        if (!jdbcClient.tableExists(table)) {
+            return false;
+        }
+
+        final var databaseColumns = getDatabaseColumns(table);
+        final var isAnyColumnNotCreated =
+            model
+                .getColumns().stream()
+                .map(ModelColumn::getColumnName)
+                .map(ColumnName::getStorageName)
+                .anyMatch(not(databaseColumns::contains));
+
+        return !isAnyColumnNotCreated;
+    }
+
+    @Override
+    @SneakyThrows
+    public void createTable(Model model) {
+        final var table = TableHelper.getLatestTableForWrite(model);
+        createTable(model, table);
+    }
+
+    @SneakyThrows
+    public void createTable(Model model, String table) {
+        createOrUpdateTable(table, model.getColumns(), false);
+        createOrUpdateTableIndexes(table, model.getColumns(), false);
+        createAdditionalTable(model);
+    }
+
+    public String getColumnDefinition(ModelColumn column) {
+        return getColumnDefinition(column, column.getType(), column.getGenericType());
+    }
+
+    protected String getColumnDefinition(ModelColumn column, Class<?> type, Type genericType) {
+        final String storageName = column.getColumnName().getStorageName();
+        if (Integer.class.equals(type) || int.class.equals(type) || Layer.class.equals(type)) {
+            return storageName + " INT";
+        } else if (Long.class.equals(type) || long.class.equals(type)) {
+            return storageName + " BIGINT";
+        } else if (Double.class.equals(type) || double.class.equals(type)) {
+            return storageName + " DOUBLE";
+        } else if (String.class.equals(type)) {
+            return storageName + " VARCHAR(" + column.getLength() + ")";
+        } else if (StorageDataComplexObject.class.isAssignableFrom(type)) {
+            return storageName + " VARCHAR(20000)";
+        } else if (byte[].class.equals(type)) {
+            return storageName + " MEDIUMTEXT";
+        } else if (JsonObject.class.equals(type)) {
+            return storageName + " VARCHAR(" + column.getLength() + ")";
+        } else if (List.class.isAssignableFrom(type)) {
+            final Type elementType = ((ParameterizedType) genericType).getActualTypeArguments()[0];
+            return getColumnDefinition(column, (Class<?>) elementType, elementType);
+        } else {
+            throw new IllegalArgumentException("Unsupported data type: " + type.getName());
+        }
+    }
+
+    public void createOrUpdateTableIndexes(String table, List<ModelColumn> columns,
+                                           boolean isAdditionalTable) throws SQLException {
+        final var jdbcClient = (JDBCClient) client;
+
+        // Additional table's id is a many-to-one relation to the main table's id,
+        // and thus can not be primary key, but a simple index.
+        if (isAdditionalTable) {
+            final var index = "idx_" + Math.abs((table + "_" + JDBCTableInstaller.ID_COLUMN).hashCode());
+            if (!jdbcClient.indexExists(table, index)) {
+                executeSQL(
+                    new SQLBuilder("CREATE INDEX ")
+                        .append(index)
+                        .append(" ON ").append(table)
+                        .append("(")
+                        .append(ID_COLUMN)
+                        .append(")")
+                );
+            }
+        }
+
+        if (!isAdditionalTable) {
+            final var index = "idx_" + Math.abs((table + "_" + JDBCTableInstaller.TABLE_COLUMN).hashCode());
+            if (!jdbcClient.indexExists(table, index)) {
+                executeSQL(
+                    new SQLBuilder("CREATE INDEX ")
+                        .append(index)
+                        .append(" ON ")
+                        .append(table)
+                        .append("(")
+                        .append(JDBCTableInstaller.TABLE_COLUMN)
+                        .append(")")
+                );
+            }
+        }
+
+        final var columnsMissingIndex =
+            columns
+                .stream()
+                .filter(ModelColumn::shouldIndex)
+                .filter(it -> it.getLength() < 256)
+                .map(ModelColumn::getColumnName)
+                .map(ColumnName::getStorageName)
+                .collect(toList());
+        for (var column : columnsMissingIndex) {
+            final var index = "idx_" + Math.abs((table + "_" + column).hashCode());
+            if (!jdbcClient.indexExists(table, index)) {
+                executeSQL(
+                    new SQLBuilder("CREATE INDEX ")
+                        .append(index)
+                        .append(" ON ").append(table).append("(")
+                        .append(column)
+                        .append(")")
+                );
+            }
+        }
+
+        final var columnNames =
+            columns
+                .stream()
+                .map(ModelColumn::getColumnName)
+                .map(ColumnName::getStorageName)
+                .collect(toSet());
+        for (final var modelColumn : columns) {
+            for (final var compositeIndex : modelColumn.getSqlDatabaseExtension().getIndices()) {
+                final var multiColumns = Arrays.asList(compositeIndex.getColumns());
+                // Don't create composite index on the additional table if it doesn't contain all needed columns.
+                if (isAdditionalTable && !columnNames.containsAll(multiColumns)) {
+                    continue;
+                }
+                final var index = "idx_" + Math.abs((table + "_" + String.join("_", multiColumns)).hashCode());
+                if (jdbcClient.indexExists(table, index)) {
+                    continue;
+                }
+                executeSQL(
+                    new SQLBuilder("CREATE INDEX ")
+                        .append(index)
+                        .append(" ON ")
+                        .append(table)
+                        .append(multiColumns.stream().collect(joining(", ", " (", ")")))
+                );
+            }
+        }
+    }
+
+    public void executeSQL(SQLBuilder sql) throws SQLException {
+        final var c = (JDBCClient) client;
+        c.execute(sql.toString());
+    }
+
+    public void createAdditionalTable(Model model) throws SQLException {
+        final var additionalTables = model.getSqlDBModelExtension().getAdditionalTables();
+        for (final var table : additionalTables.values()) {
+            final var tableName = TableHelper.getLatestTableForWrite(table.getName());
+            createOrUpdateTable(tableName, table.getColumns(), true);
+            createOrUpdateTableIndexes(tableName, table.getColumns(), true);
+        }
+    }
+
+    @SneakyThrows
+    public void createOrUpdateTable(String table, List<ModelColumn> columns, boolean isAdditionalTable) {
+        // Some SQL implementations don't have the syntax "alter table <table> add column if not exists",
+        // we have to query the columns and filter out the existing ones.
+        final var columnsToBeAdded = new ArrayList<>(columns);
+        final var existingColumns = getDatabaseColumns(table);
+        columnsToBeAdded.removeIf(it -> existingColumns.contains(it.getColumnName().getStorageName()));
+
+        final var jdbcClient = (JDBCClient) client;
+        if (!jdbcClient.tableExists(table)) {
+            createTable(table, columnsToBeAdded, isAdditionalTable);
+        } else {
+            updateTable(table, columnsToBeAdded);
+        }
+    }
+
+    protected Set<String> getDatabaseColumns(String table) throws SQLException {
+        final var jdbcClient = (JDBCClient) client;
+        return jdbcClient.getTableColumns(table);
+    }
+
+    private void updateTable(String table, List<ModelColumn> columns) throws SQLException {
+        final var alterSqls =
+            columns
+                .stream()
+                .map(this::getColumnDefinition)
+                .map(definition -> "ALTER TABLE " + table + " ADD COLUMN " + definition + "; ")
+                .collect(toList());
+
+        for (String alterSql : alterSqls) {
+            executeSQL(new SQLBuilder(alterSql));
+        }
+    }
+
+    private void createTable(String table, List<ModelColumn> columns, boolean isAdditionalTable) throws SQLException {
+        final var columnDefinitions = new ArrayList<String>();
+        columnDefinitions.add(ID_COLUMN + " VARCHAR(512)" + (!isAdditionalTable ? " PRIMARY KEY" : ""));
+        if (!isAdditionalTable) {
+            columnDefinitions.add(TABLE_COLUMN + " VARCHAR(512)");
+        }
+        columns
+            .stream()
+            .map(this::getColumnDefinition)
+            .collect(toCollection(() -> columnDefinitions));
+
+        final var sql =
+            new SQLBuilder("CREATE TABLE IF NOT EXISTS " + table)
+                .append(columnDefinitions.stream().collect(joining(", ", " (", ");")));
+
+        executeSQL(sql);
+    }
+}
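
The installer names indexes deterministically by hashing the table and column names ("idx_" plus the absolute hash value), which keeps identifiers short and stable regardless of how many columns a composite index spans. A small hypothetical example of the derivation:

    public class IndexNameDemo {
        public static void main(String[] args) {
            String table = "service_cpm_20230317"; // illustrative table name
            String column = "service_id";
            String index = "idx_" + Math.abs((table + "_" + column).hashCode());
            System.out.println(index); // stable for a given table/column pair
        }
    }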
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Logs.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/SQLAndParameters.java
similarity index 70%
copy from oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Logs.java
copy to oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/SQLAndParameters.java
index e9d6759a50..15c3c4df09 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/Logs.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/SQLAndParameters.java
@@ -16,22 +16,24 @@
  *
  */
 
-package org.apache.skywalking.oap.server.core.query.type;
+package org.apache.skywalking.oap.server.storage.plugin.jdbc.common;
 
-import java.util.ArrayList;
-import java.util.List;
 import lombok.Getter;
-import lombok.Setter;
+import lombok.NonNull;
+import lombok.RequiredArgsConstructor;
 import lombok.experimental.Accessors;
 
-@Setter
-@Getter
-@Accessors(chain = true)
-public class Logs {
-    private final List<Log> logs;
-    private String errorReason;
+import java.util.List;
+
+@Accessors(fluent = true)
+@RequiredArgsConstructor
+public class SQLAndParameters {
+    @Getter
+    private final String sql;
+    @NonNull
+    private final List<Object> parameters;
 
-    public Logs() {
-        this.logs = new ArrayList<>();
+    public Object[] parameters() {
+        return parameters.toArray();
     }
 }
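
SQLAndParameters pairs a SQL string with its positional parameters; Lombok's @Accessors(fluent = true) generates getter names without the "get" prefix, and parameters() hands the list back as an array. A hypothetical usage sketch (the query is made up):

    import java.util.List;
    import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;

    public class SqlAndParamsDemo {
        public static void main(String[] args) {
            var sp = new SQLAndParameters(
                "SELECT * FROM service_traffic_20230317 WHERE service_id = ?",
                List.<Object>of("abc"));
            String sql = sp.sql();             // fluent getter, no "get" prefix
            Object[] params = sp.parameters(); // parameters as an array
        }
    }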
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/TableHelper.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/TableHelper.java
new file mode 100644
index 0000000000..cac5de380e
--- /dev/null
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/TableHelper.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.skywalking.oap.server.storage.plugin.jdbc.common;
+
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import com.google.common.collect.Range;
+import lombok.AccessLevel;
+import lombok.Getter;
+import lombok.NonNull;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.skywalking.oap.server.core.Const;
+import org.apache.skywalking.oap.server.core.CoreModule;
+import org.apache.skywalking.oap.server.core.analysis.DownSampling;
+import org.apache.skywalking.oap.server.core.analysis.FunctionCategory;
+import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
+import org.apache.skywalking.oap.server.core.config.ConfigService;
+import org.apache.skywalking.oap.server.core.storage.model.Model;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.library.module.ModuleManager;
+import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.TableMetaInfo;
+
+import java.time.Duration;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.LongStream;
+
+import static java.util.stream.Collectors.toList;
+
+/**
+ * Utility class to get the table name for a given model.
+ */
+@Slf4j
+@RequiredArgsConstructor
+public class TableHelper {
+    private final ModuleManager moduleManager;
+    private final JDBCClient jdbcClient;
+
+    @Getter(lazy = true, value = AccessLevel.PRIVATE)
+    private final ConfigService configService = moduleManager.find(CoreModule.NAME).provider().getService(ConfigService.class);
+
+    private final LoadingCache<String, Boolean> tableExistence =
+        CacheBuilder.newBuilder()
+                    .expireAfterAccess(Duration.ofMinutes(10))
+                    .build(new CacheLoader<>() {
+                        @Override
+                        public @NonNull Boolean load(@NonNull String tableName) throws Exception {
+                            return jdbcClient.tableExists(tableName);
+                        }
+                    });
+
+    public static String getTableName(Model model) {
+        final var aggFuncName = FunctionCategory.uniqueFunctionName(model.getStreamClass()).replaceAll("-", "_");
+        return StringUtil.isNotBlank(aggFuncName) ? aggFuncName : model.getName();
+    }
+
+    public static String getLatestTableForWrite(Model model) {
+        final var tableName = getTableName(model);
+
+        if (!model.isTimeSeries()) {
+            return tableName;
+        }
+
+        final var dayTimeBucket = TimeBucket.getTimeBucket(System.currentTimeMillis(), DownSampling.Day);
+        return tableName + Const.UNDERSCORE + dayTimeBucket;
+    }
+
+    /**
+     * @param rawTableName the table name without time bucket suffix.
+     * @return the table name with time bucket.
+     */
+    public static String getLatestTableForWrite(String rawTableName) {
+        final var dayTimeBucket = TimeBucket.getTimeBucket(System.currentTimeMillis(), DownSampling.Day);
+        return rawTableName + Const.UNDERSCORE + dayTimeBucket;
+    }
+
+    public static String getTable(Model model, long timeBucket) {
+        final var tableName = getTableName(model);
+        if (timeBucket == 0) {
+            timeBucket = TimeBucket.getTimeBucket(System.currentTimeMillis(), DownSampling.Day);
+        }
+
+        if (!model.isTimeSeries()) {
+            return tableName;
+        }
+
+        return tableName + Const.UNDERSCORE + TimeBucket.getTimeBucket(TimeBucket.getTimestamp(timeBucket), DownSampling.Day);
+    }
+
+    public static String getTable(String rawTableName, long timeBucket) {
+        if (timeBucket == 0) {
+            timeBucket = TimeBucket.getTimeBucket(System.currentTimeMillis(), DownSampling.Day);
+        }
+
+        return rawTableName + Const.UNDERSCORE + TimeBucket.getTimeBucket(TimeBucket.getTimestamp(timeBucket), DownSampling.Day);
+    }
+
+    public List<String> getTablesForRead(String modelName, long timeBucketStart, long timeBucketEnd) {
+        final var model = TableMetaInfo.get(modelName);
+        final var tableName = getTableName(model);
+
+        if (!model.isTimeSeries()) {
+            return Collections.singletonList(tableName);
+        }
+
+        timeBucketStart = TimeBucket.getTimeBucket(TimeBucket.getTimestamp(timeBucketStart), DownSampling.Day);
+        timeBucketEnd = TimeBucket.getTimeBucket(TimeBucket.getTimestamp(timeBucketEnd), DownSampling.Day);
+
+        final var ttlTimeBucketRange = getTTLTimeBucketRange(model);
+
+        return LongStream
+            .rangeClosed(timeBucketStart, timeBucketEnd)
+            .distinct()
+            .filter(ttlTimeBucketRange::contains)
+            .mapToObj(it -> tableName + "_" + it)
+            .filter(table -> {
+                try {
+                    return tableExistence.get(table);
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
+            })
+            .collect(toList());
+    }
+
+    public List<String> getTablesWithinTTL(String modelName) {
+        final var model = TableMetaInfo.get(modelName);
+        final var range = getTTLTimeBucketRange(model);
+        return getTablesForRead(modelName, range.lowerEndpoint(), range.upperEndpoint());
+    }
+
+    public static String generateId(Model model, String originalID) {
+        if (model.isRecord() && !model.isSuperDataset()) {
+            return generateId(model.getName(), originalID);
+        }
+        if (!model.isMetric() || !isFunctionMetric(model)) {
+            return originalID;
+        }
+        return generateId(model.getName(), originalID);
+    }
+
+    public static String generateId(String modelName, String originalID) {
+        return modelName + Const.ID_CONNECTOR + originalID;
+    }
+
+    public static boolean isFunctionMetric(Model model) {
+        return StringUtil.isNotBlank(FunctionCategory.uniqueFunctionName(model.getStreamClass()));
+    }
+
+    public static long getTimeBucket(String table) {
+        final var split = table.split("_");
+        return Long.parseLong(split[split.length - 1]);
+    }
+
+    Range<Long> getTTLTimeBucketRange(Model model) {
+        final var ttl = model.isRecord() ?
+            getConfigService().getRecordDataTTL() :
+            getConfigService().getMetricsDataTTL();
+        final var timeBucketEnd = TimeBucket.getTimeBucket(System.currentTimeMillis(), DownSampling.Day);
+        final var timeBucketStart = TimeBucket.getTimeBucket(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(ttl), DownSampling.Day);
+        return Range.closed(timeBucketStart, timeBucketEnd);
+    }
+}
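
The core of the day-based rolling in TableHelper is a naming rule: the physical table is the logical table name plus an underscore plus the day time bucket, e.g. segment_20230317. A sketch of that rule using java.time in place of SkyWalking's TimeBucket helper (the zone handling is an assumption; class and table names are hypothetical):

    import java.time.Instant;
    import java.time.LocalDate;
    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;

    class TableNameSketch {
        // A day time bucket is the yyyyMMdd form of the timestamp, matching DownSampling.Day.
        static long dayTimeBucket(long timestampMillis) {
            final LocalDate day = Instant.ofEpochMilli(timestampMillis)
                                         .atZone(ZoneId.systemDefault())
                                         .toLocalDate();
            return Long.parseLong(day.format(DateTimeFormatter.BASIC_ISO_DATE));
        }

        // Mirrors getLatestTableForWrite: logical name + "_" + day bucket.
        static String tableForWrite(String rawTableName, long timestampMillis) {
            return rawTableName + "_" + dayTimeBucket(timestampMillis);
        }

        public static void main(String[] args) {
            System.out.println(tableForWrite("segment", System.currentTimeMillis())); // e.g. segment_20230317
        }
    }
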
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCAggregationQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCAggregationQueryDAO.java
index 61925653e3..53b138a3b6 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCAggregationQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCAggregationQueryDAO.java
@@ -18,13 +18,8 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
 import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.query.enumeration.Order;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
@@ -32,60 +27,103 @@ import org.apache.skywalking.oap.server.core.query.input.TopNCondition;
 import org.apache.skywalking.oap.server.core.query.type.KeyValue;
 import org.apache.skywalking.oap.server.core.query.type.SelectedRecord;
 import org.apache.skywalking.oap.server.core.storage.query.IAggregationQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static java.util.Comparator.comparing;
+import static java.util.stream.Collectors.groupingBy;
 
 @RequiredArgsConstructor
 public class JDBCAggregationQueryDAO implements IAggregationQueryDAO {
-    protected final JDBCHikariCPClient jdbcClient;
+    protected final JDBCClient jdbcClient;
+    protected final TableHelper tableHelper;
 
     @Override
+    @SneakyThrows
     public List<SelectedRecord> sortMetrics(final TopNCondition metrics,
                                             final String valueColumnName,
                                             final Duration duration,
-                                            List<KeyValue> additionalConditions) throws IOException {
-        List<Object> conditions = new ArrayList<>(10);
-        StringBuilder sql = buildMetricsValueSql(valueColumnName, metrics.getName());
-        sql.append(Metrics.TIME_BUCKET).append(" >= ? and ").append(Metrics.TIME_BUCKET).append(" <= ?");
-        conditions.add(duration.getStartTimeBucket());
-        conditions.add(duration.getEndTimeBucket());
-        if (additionalConditions != null) {
-            additionalConditions.forEach(condition -> {
-                sql.append(" and ").append(condition.getKey()).append("=?");
-                conditions.add(condition.getValue());
+                                            final List<KeyValue> additionalConditions) {
+        final var results = new ArrayList<SelectedRecord>();
+        final var tables = tableHelper.getTablesForRead(
+            metrics.getName(), duration.getStartTimeBucket(), duration.getEndTimeBucket()
+        );
+
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQL(metrics, valueColumnName, duration, additionalConditions, table);
+
+            jdbcClient.executeQuery(sqlAndParameters.sql(), resultSet -> {
+                while (resultSet.next()) {
+                    final var topNEntity = new SelectedRecord();
+                    topNEntity.setId(resultSet.getString(Metrics.ENTITY_ID));
+                    topNEntity.setValue(String.valueOf(resultSet.getInt("result")));
+                    results.add(topNEntity);
+                }
+                return null;
+            }, sqlAndParameters.parameters());
+        }
+
+        final var comparator =
+            Order.ASC.equals(metrics.getOrder()) ?
+                comparing((SelectedRecord it) -> Long.parseLong(it.getValue())) :
+                comparing((SelectedRecord it) -> Long.parseLong(it.getValue())).reversed();
+        return results
+            .stream()
+            .collect(groupingBy(SelectedRecord::getId))
+            .entrySet()
+            .stream()
+            .map(entry -> {
+                final var selectedRecord = new SelectedRecord();
+                final var average = (int) entry.getValue().stream().map(SelectedRecord::getValue).mapToLong(Long::parseLong).average().orElse(0);
+                selectedRecord.setId(entry.getKey());
+                selectedRecord.setValue(String.valueOf(average));
+                return selectedRecord;
+            })
+            .sorted(comparator)
+            .limit(metrics.getTopN())
+            .collect(Collectors.toList());
+    }
+
+    protected SQLAndParameters buildSQL(
+        final TopNCondition metrics,
+        final String valueColumnName,
+        final Duration duration,
+        final List<KeyValue> queries,
+        final String table) {
+
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>(10);
+        sql.append("select result, ").append(Metrics.ENTITY_ID)
+           .append(" from (select avg(").append(valueColumnName).append(") as result,")
+           .append(Metrics.ENTITY_ID)
+           .append(" from ").append(table)
+           .append(" where ")
+           .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?")
+           .append(" and ")
+           .append(Metrics.TIME_BUCKET).append(" >= ? ")
+           .append(" and ")
+           .append(Metrics.TIME_BUCKET).append(" <= ?");
+
+        parameters.add(metrics.getName());
+        parameters.add(duration.getStartTimeBucket());
+        parameters.add(duration.getEndTimeBucket());
+        if (queries != null) {
+            queries.forEach(query -> {
+                sql.append(" and ").append(query.getKey()).append(" = ?");
+                parameters.add(query.getValue());
             });
         }
         sql.append(" group by ").append(Metrics.ENTITY_ID);
         sql.append(")  as T order by result")
-           .append(metrics.getOrder().equals(Order.ASC) ? " asc" : " desc")
+           .append(Order.ASC.equals(metrics.getOrder()) ? " asc" : " desc")
            .append(" limit ")
            .append(metrics.getTopN());
-        List<SelectedRecord> topNEntities = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection();
-             ResultSet resultSet = jdbcClient.executeQuery(
-                 connection, sql.toString(), conditions.toArray(new Object[0]))) {
-            while (resultSet.next()) {
-                SelectedRecord topNEntity = new SelectedRecord();
-                topNEntity.setId(resultSet.getString(Metrics.ENTITY_ID));
-                topNEntity.setValue(resultSet.getString("result"));
-                topNEntities.add(topNEntity);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
-        return topNEntities;
-    }
-
-    protected StringBuilder buildMetricsValueSql(String valueColumnName, String metricsName) {
-        StringBuilder sql = new StringBuilder();
-        sql.append("select result,")
-           .append(Metrics.ENTITY_ID)
-           .append(" from (select avg(")
-           .append(valueColumnName)
-           .append(") result,")
-           .append(Metrics.ENTITY_ID)
-           .append(" from ")
-           .append(metricsName)
-           .append(" where ");
-        return sql;
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 }
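
Because a query duration can now span several day tables, sortMetrics runs the SQL once per table and merges the per-table averages in memory: group by entity id, average the values, then sort and cut to topN. A standalone sketch of that merge step, with Map.Entry standing in for SelectedRecord and hypothetical sample values:

    import java.util.Comparator;
    import java.util.List;
    import java.util.Map;

    import static java.util.Comparator.comparing;
    import static java.util.stream.Collectors.averagingLong;
    import static java.util.stream.Collectors.groupingBy;
    import static java.util.stream.Collectors.toList;

    class TopNMergeSketch {
        static List<Map.Entry<String, Double>> mergeTopN(
                List<Map.Entry<String, Long>> perTableRows, int topN, boolean asc) {
            final Comparator<Map.Entry<String, Double>> byValue = comparing(Map.Entry::getValue);
            return perTableRows.stream()
                // The same entity id can appear once per day table; average across tables.
                .collect(groupingBy(Map.Entry::getKey, averagingLong(Map.Entry::getValue)))
                .entrySet().stream()
                .sorted(asc ? byValue : byValue.reversed())
                .limit(topN)
                .collect(toList());
        }

        public static void main(String[] args) {
            final var rows = List.of(
                Map.entry("endpoint-a", 100L),
                Map.entry("endpoint-a", 200L),
                Map.entry("endpoint-b", 120L));
            // Descending order: endpoint-a averages 150.0 and ranks first.
            System.out.println(mergeTopN(rows, 2, false));
        }
    }
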
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCAlarmQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCAlarmQueryDAO.java
index 3ee3a038a4..588f393af1 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCAlarmQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCAlarmQueryDAO.java
@@ -19,14 +19,9 @@
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
 import com.google.common.base.Strings;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.CoreModule;
 import org.apache.skywalking.oap.server.core.alarm.AlarmRecord;
@@ -37,66 +32,125 @@ import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.query.type.AlarmMessage;
 import org.apache.skywalking.oap.server.core.query.type.Alarms;
 import org.apache.skywalking.oap.server.core.storage.query.IAlarmQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.module.ModuleManager;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
 
+import static java.util.Comparator.comparing;
 import static java.util.Objects.nonNull;
-import static org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.H2TableInstaller.ID_COLUMN;
+import static java.util.function.Predicate.not;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toSet;
 
+@Slf4j
+@RequiredArgsConstructor
 public class JDBCAlarmQueryDAO implements IAlarmQueryDAO {
-    protected final JDBCHikariCPClient jdbcClient;
+    protected final JDBCClient jdbcClient;
+    protected final ModuleManager manager;
+    protected final TableHelper tableHelper;
 
-    private final ModuleManager manager;
+    private Set<String> searchableTagKeys;
 
-    private List<String> searchableTagKeys;
+    @Override
+    @SneakyThrows
+    public Alarms getAlarm(Integer scopeId, String keyword, int limit, int from,
+                           Duration duration, final List<Tag> tags) {
+        if (searchableTagKeys == null) {
+            final ConfigService configService = manager.find(CoreModule.NAME)
+                                                       .provider()
+                                                       .getService(ConfigService.class);
+            searchableTagKeys = new HashSet<>(Arrays.asList(configService.getSearchableAlarmTags().split(Const.COMMA)));
+        }
+        // If any required tag is not searchable, we don't need to run the real query.
+        if (tags != null && !searchableTagKeys.containsAll(tags.stream().map(Tag::getKey).collect(toSet()))) {
+            log.warn(
+                "Searching tags that are not searchable: {}",
+                tags.stream().map(Tag::getKey).filter(not(searchableTagKeys::contains)).collect(toSet()));
+            return new Alarms();
+        }
+
+        final var tables = tableHelper.getTablesForRead(
+            AlarmRecord.INDEX_NAME, duration.getStartTimeBucket(), duration.getEndTimeBucket()
+        );
+        final var alarmMsgs = new ArrayList<AlarmMessage>();
 
-    public JDBCAlarmQueryDAO(final JDBCHikariCPClient jdbcClient,
-                             final ModuleManager manager) {
-        this.jdbcClient = jdbcClient;
-        this.manager = manager;
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQL(scopeId, keyword, limit, from, duration, tags, table);
+            jdbcClient.executeQuery(sqlAndParameters.sql(), resultSet -> {
+                while (resultSet.next()) {
+                    final var message = new AlarmMessage();
+                    message.setId(resultSet.getString(AlarmRecord.ID0));
+                    message.setId1(resultSet.getString(AlarmRecord.ID1));
+                    message.setMessage(resultSet.getString(AlarmRecord.ALARM_MESSAGE));
+                    message.setStartTime(resultSet.getLong(AlarmRecord.START_TIME));
+                    message.setScope(Scope.Finder.valueOf(resultSet.getInt(AlarmRecord.SCOPE)));
+                    message.setScopeId(resultSet.getInt(AlarmRecord.SCOPE));
+                    String dataBinaryBase64 = resultSet.getString(AlarmRecord.TAGS_RAW_DATA);
+                    if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
+                        parserDataBinaryBase64(dataBinaryBase64, message.getTags());
+                    }
+                    alarmMsgs.add(message);
+                }
+                return null;
+            }, sqlAndParameters.parameters());
+        }
+        return new Alarms(
+            alarmMsgs
+                .stream()
+                .sorted(comparing(AlarmMessage::getStartTime).reversed())
+                .skip(from)
+                .limit(limit)
+                .collect(toList())
+        );
     }
 
-    @Override
-    public Alarms getAlarm(Integer scopeId, String keyword, int limit, int from,
-                           Duration duration, final List<Tag> tags) throws IOException {
+    protected SQLAndParameters buildSQL(Integer scopeId, String keyword, int limit, int from,
+                                        Duration duration, List<Tag> tags, String table) {
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>();
+
         long startTB = 0;
         long endTB = 0;
         if (nonNull(duration)) {
             startTB = duration.getStartTimeBucketInSec();
             endTB = duration.getEndTimeBucketInSec();
         }
-        if (searchableTagKeys == null) {
-            final ConfigService configService = manager.find(CoreModule.NAME)
-                    .provider()
-                    .getService(ConfigService.class);
-            searchableTagKeys = Arrays.asList(configService.getSearchableAlarmTags().split(Const.COMMA));
-        }
-        StringBuilder sql = new StringBuilder();
-        List<Object> parameters = new ArrayList<>(10);
-        sql.append("from ").append(AlarmRecord.INDEX_NAME);
-        /**
+        sql.append("select * from ").append(table);
+        /*
          * This is an AdditionalEntity feature, see:
          * {@link org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase.AdditionalEntity}
          */
+        final var timeBucket = TableHelper.getTimeBucket(table);
+        final var tagTable = TableHelper.getTable(AlarmRecord.ADDITIONAL_TAG_TABLE, timeBucket);
         if (!CollectionUtils.isEmpty(tags)) {
             for (int i = 0; i < tags.size(); i++) {
-                sql.append(" inner join ").append(AlarmRecord.ADDITIONAL_TAG_TABLE).append(" ");
-                sql.append(AlarmRecord.ADDITIONAL_TAG_TABLE + i);
-                sql.append(" on ").append(AlarmRecord.INDEX_NAME).append(".").append(ID_COLUMN).append(" = ");
-                sql.append(AlarmRecord.ADDITIONAL_TAG_TABLE + i).append(".").append(ID_COLUMN);
+                sql.append(" inner join ").append(tagTable).append(" ");
+                sql.append(tagTable + i);
+                sql.append(" on ").append(table).append(".").append(JDBCTableInstaller.ID_COLUMN).append(" = ");
+                sql.append(tagTable + i).append(".").append(JDBCTableInstaller.ID_COLUMN);
             }
         }
-        sql.append(" where ");
-        sql.append(" 1=1 ");
+        sql.append(" where ")
+           .append(table).append(".").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+        parameters.add(AlarmRecord.INDEX_NAME);
         if (Objects.nonNull(scopeId)) {
             sql.append(" and ").append(AlarmRecord.SCOPE).append(" = ?");
-            parameters.add(scopeId.intValue());
+            parameters.add(scopeId);
         }
         if (startTB != 0 && endTB != 0) {
-            sql.append(" and ").append(AlarmRecord.INDEX_NAME).append(".").append(AlarmRecord.TIME_BUCKET).append(" >= ?");
+            sql.append(" and ").append(table).append(".").append(AlarmRecord.TIME_BUCKET).append(" >= ?");
             parameters.add(startTB);
-            sql.append(" and ").append(AlarmRecord.INDEX_NAME).append(".").append(AlarmRecord.TIME_BUCKET).append(" <= ?");
+            sql.append(" and ").append(table).append(".").append(AlarmRecord.TIME_BUCKET).append(" <= ?");
             parameters.add(endTB);
         }
 
@@ -106,49 +160,14 @@ public class JDBCAlarmQueryDAO implements IAlarmQueryDAO {
         }
         if (CollectionUtils.isNotEmpty(tags)) {
             for (int i = 0; i < tags.size(); i++) {
-                final int foundIdx = searchableTagKeys.indexOf(tags.get(i).getKey());
-                if (foundIdx > -1) {
-                    sql.append(" and ").append(AlarmRecord.ADDITIONAL_TAG_TABLE + i).append(".");
-                    sql.append(AlarmRecord.TAGS).append(" = ?");
-                    parameters.add(tags.get(i).toString());
-                } else {
-                    //If the tag is not searchable, but is required, then don't need to run the real query.
-                    return new Alarms();
-                }
+                sql.append(" and ").append(tagTable + i).append(".");
+                sql.append(AlarmRecord.TAGS).append(" = ?");
+                parameters.add(tags.get(i).toString());
             }
         }
         sql.append(" order by ").append(AlarmRecord.START_TIME).append(" desc ");
+        sql.append(" limit ").append(from + limit);
 
-        Alarms alarms = new Alarms();
-        try (Connection connection = jdbcClient.getConnection()) {
-
-            this.buildLimit(sql, from, limit);
-
-            try (ResultSet resultSet = jdbcClient.executeQuery(connection, "select * " + sql.toString(), parameters.toArray(new Object[0]))) {
-                while (resultSet.next()) {
-                    AlarmMessage message = new AlarmMessage();
-                    message.setId(resultSet.getString(AlarmRecord.ID0));
-                    message.setId1(resultSet.getString(AlarmRecord.ID1));
-                    message.setMessage(resultSet.getString(AlarmRecord.ALARM_MESSAGE));
-                    message.setStartTime(resultSet.getLong(AlarmRecord.START_TIME));
-                    message.setScope(Scope.Finder.valueOf(resultSet.getInt(AlarmRecord.SCOPE)));
-                    message.setScopeId(resultSet.getInt(AlarmRecord.SCOPE));
-                    String dataBinaryBase64 = resultSet.getString(AlarmRecord.TAGS_RAW_DATA);
-                    if (!com.google.common.base.Strings.isNullOrEmpty(dataBinaryBase64)) {
-                        parserDataBinaryBase64(dataBinaryBase64, message.getTags());
-                    }
-                    alarms.getMsgs().add(message);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
-
-        return alarms;
-    }
-
-    protected void buildLimit(StringBuilder sql, int from, int limit) {
-        sql.append(" LIMIT ").append(limit);
-        sql.append(" OFFSET ").append(from);
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 }
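
The alarm query shows the pagination pattern shared by the read paths in this commit: each per-table SQL is capped at from + limit rows, then the merged results are sorted and paged in memory with skip(from).limit(limit). A small sketch of that merge-and-page step, with longs standing in for rows and their start times:

    import java.util.Comparator;
    import java.util.List;

    import static java.util.stream.Collectors.toList;

    class CrossTablePagingSketch {
        // Elements stand for rows; here just their start times in millis.
        static List<Long> page(List<List<Long>> perTableResults, int from, int limit) {
            return perTableResults.stream()
                .flatMap(List::stream)
                .sorted(Comparator.reverseOrder()) // newest first, like "order by start_time desc"
                .skip(from)
                .limit(limit)
                .collect(toList());
        }

        public static void main(String[] args) {
            // Two day tables, each already capped at from + limit rows by the SQL "limit".
            final var day1 = List.of(1000L, 900L, 800L);
            final var day2 = List.of(1100L, 950L, 850L);
            System.out.println(page(List.of(day1, day2), 1, 3)); // [1000, 950, 900]
        }
    }
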
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCBatchDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCBatchDAO.java
index d5aceebcd2..563859eb89 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCBatchDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCBatchDAO.java
@@ -18,18 +18,9 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.CompletableFuture;
-import java.util.function.Function;
-import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.storage.IBatchDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.JDBCClientException;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
 import org.apache.skywalking.oap.server.library.client.request.PrepareRequest;
 import org.apache.skywalking.oap.server.library.datacarrier.DataCarrier;
@@ -38,13 +29,19 @@ import org.apache.skywalking.oap.server.library.util.CollectionUtils;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.BatchSQLExecutor;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
 @Slf4j
 public class JDBCBatchDAO implements IBatchDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
     private final DataCarrier<PrepareRequest> dataCarrier;
     private final int maxBatchSqlSize;
 
-    public JDBCBatchDAO(JDBCHikariCPClient jdbcClient, int maxBatchSqlSize, int asyncBatchPersistentPoolSize) {
+    public JDBCBatchDAO(JDBCClient jdbcClient, int maxBatchSqlSize, int asyncBatchPersistentPoolSize) {
         this.jdbcClient = jdbcClient;
         String name = "H2_ASYNCHRONOUS_BATCH_PERSISTENT";
         if (log.isDebugEnabled()) {
@@ -52,7 +49,7 @@ public class JDBCBatchDAO implements IBatchDAO {
         }
         this.maxBatchSqlSize = maxBatchSqlSize;
         this.dataCarrier = new DataCarrier<>(name, asyncBatchPersistentPoolSize, 10000);
-        this.dataCarrier.consume(new JDBCBatchDAO.H2BatchConsumer(this), asyncBatchPersistentPoolSize, 20);
+        this.dataCarrier.consume(new H2BatchConsumer(this), asyncBatchPersistentPoolSize, 20);
     }
 
     @Override
@@ -74,22 +71,16 @@ public class JDBCBatchDAO implements IBatchDAO {
             log.debug("to execute sql statements execute, data size: {}, maxBatchSqlSize: {}", sqls.size(), maxBatchSqlSize);
         }
 
-        final Map<PrepareRequest, List<PrepareRequest>> batchRequestMap =
-            sqls.stream().collect(Collectors.groupingBy(Function.identity()));
-        try (Connection connection = jdbcClient.getConnection()) {
-            batchRequestMap.forEach((key, requests) -> {
-                try {
-                    BatchSQLExecutor batchSQLExecutor =
-                            new BatchSQLExecutor(requests);
-                    batchSQLExecutor.invoke(connection, maxBatchSqlSize);
-                } catch (SQLException e) {
-                    // Just avoid one execution failure makes the rest of batch failure.
-                    log.error(e.getMessage(), e);
-                }
-            });
-        } catch (SQLException | JDBCClientException e) {
-            log.warn("execute sql failed, discard data size: {}", prepareRequests.size(), e);
-        }
+        final var batchRequestsOfSql = sqls.stream().collect(Collectors.groupingBy(Function.identity()));
+        batchRequestsOfSql.forEach((sql, requests) -> {
+            try {
+                final var batchSQLExecutor = new BatchSQLExecutor(jdbcClient, requests);
+                batchSQLExecutor.invoke(maxBatchSqlSize);
+            } catch (Exception e) {
+                // Avoid letting one execution failure fail the rest of the batch.
+                log.error(e.getMessage(), e);
+            }
+        });
         if (log.isDebugEnabled()) {
             log.debug("execute sql statements done, data size: {}, maxBatchSqlSize: {}", prepareRequests.size(), maxBatchSqlSize);
         }
@@ -101,7 +92,7 @@ public class JDBCBatchDAO implements IBatchDAO {
         this.dataCarrier.produce(insertRequest);
     }
 
-    private class H2BatchConsumer implements IConsumer<PrepareRequest> {
+    private static class H2BatchConsumer implements IConsumer<PrepareRequest> {
 
         private final JDBCBatchDAO h2BatchDAO;
 
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCBrowserLogQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCBrowserLogQueryDAO.java
index 03b6bb3b9b..2dbb1ca953 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCBrowserLogQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCBrowserLogQueryDAO.java
@@ -17,48 +17,92 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
 import lombok.RequiredArgsConstructor;
-import org.apache.skywalking.oap.server.core.query.input.Duration;
-import org.apache.skywalking.oap.server.library.util.StringUtil;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.browser.manual.errorlog.BrowserErrorLogRecord;
 import org.apache.skywalking.oap.server.core.browser.source.BrowserErrorCategory;
+import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.query.type.BrowserErrorLog;
 import org.apache.skywalking.oap.server.core.query.type.BrowserErrorLogs;
 import org.apache.skywalking.oap.server.core.storage.query.IBrowserLogQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.util.ArrayList;
 
+import static java.util.Comparator.comparing;
 import static java.util.Objects.nonNull;
+import static java.util.stream.Collectors.toList;
 
 @RequiredArgsConstructor
 public class JDBCBrowserLogQueryDAO implements IBrowserLogQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    protected final JDBCClient jdbcClient;
+    protected final TableHelper tableHelper;
 
     @Override
+    @SneakyThrows
     public BrowserErrorLogs queryBrowserErrorLogs(String serviceId,
                                                   String serviceVersionId,
                                                   String pagePathId,
                                                   BrowserErrorCategory category,
                                                   Duration duration,
                                                   int limit,
-                                                  int from) throws IOException {
+                                                  int from) {
+        final var logs = new ArrayList<BrowserErrorLog>();
+        final var tables = tableHelper.getTablesForRead(
+            BrowserErrorLogRecord.INDEX_NAME, duration.getStartTimeBucket(), duration.getEndTimeBucket()
+        );
+
+        for (String table : tables) {
+            final var sqlAndParameters = buildSQL(
+                serviceId, serviceVersionId, pagePathId,
+                category, duration, limit, from, table);
+
+            jdbcClient.executeQuery(
+                sqlAndParameters.sql(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        String dataBinaryBase64 = resultSet.getString(BrowserErrorLogRecord.DATA_BINARY);
+                        if (nonNull(dataBinaryBase64)) {
+                            BrowserErrorLog log = parserDataBinary(dataBinaryBase64);
+                            logs.add(log);
+                        }
+                    }
+
+                    return null;
+                },
+                sqlAndParameters.parameters()
+            );
+        }
+
+        return new BrowserErrorLogs(
+            logs
+                .stream()
+                .sorted(comparing(BrowserErrorLog::getTime).reversed())
+                .skip(from)
+                .limit(limit)
+                .collect(toList())
+        );
+    }
+
+    protected SQLAndParameters buildSQL(
+        String serviceId, String serviceVersionId, String pagePathId,
+        BrowserErrorCategory category, Duration duration, int limit, int from,
+        String table) {
+        final var sql = new StringBuilder("select " + BrowserErrorLogRecord.DATA_BINARY);
+        final var parameters = new ArrayList<>(9);
+        sql.append(" from ").append(table)
+           .append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+        parameters.add(BrowserErrorLogRecord.INDEX_NAME);
+
         long startSecondTB = 0, endSecondTB = 0;
         if (nonNull(duration)) {
             startSecondTB = duration.getStartTimeBucketInSec();
             endSecondTB = duration.getEndTimeBucketInSec();
         }
-        StringBuilder sql = new StringBuilder();
-
-        List<Object> parameters = new ArrayList<>(9);
-
-        sql.append("from ").append(BrowserErrorLogRecord.INDEX_NAME)
-           .append(" where ").append(" 1=1 ");
-
         if (startSecondTB != 0 && endSecondTB != 0) {
             sql.append(" and ").append(BrowserErrorLogRecord.TIME_BUCKET).append(" >= ?");
             parameters.add(startSecondTB);
@@ -84,32 +128,8 @@ public class JDBCBrowserLogQueryDAO implements IBrowserLogQueryDAO {
         }
 
         sql.append(" order by ").append(BrowserErrorLogRecord.TIMESTAMP).append(" DESC ");
+        sql.append(" limit ").append(from + limit);
 
-        BrowserErrorLogs logs = new BrowserErrorLogs();
-        try (Connection connection = jdbcClient.getConnection()) {
-
-            buildLimit(sql, from, limit);
-
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, "select " + BrowserErrorLogRecord.DATA_BINARY + " " + sql.toString(),
-                parameters.toArray(new Object[0])
-            )) {
-                while (resultSet.next()) {
-                    String dataBinaryBase64 = resultSet.getString(BrowserErrorLogRecord.DATA_BINARY);
-                    if (nonNull(dataBinaryBase64)) {
-                        BrowserErrorLog log = parserDataBinary(dataBinaryBase64);
-                        logs.getLogs().add(log);
-                    }
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
-        return logs;
-    }
-
-    protected void buildLimit(StringBuilder sql, int from, int limit) {
-        sql.append(" limit ").append(limit);
-        sql.append(" offset ").append(from);
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 }
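
A recurring detail in these where clauses: merged models share one physical table per day, so every query first pins JDBCTableInstaller.TABLE_COLUMN to the model's logical name. A sketch of that discriminator clause; the column value "table_name" and the physical table name are assumptions for illustration:

    import java.util.ArrayList;
    import java.util.List;

    class DiscriminatorClauseSketch {
        // Assumed value of JDBCTableInstaller.TABLE_COLUMN, for illustration only.
        static final String TABLE_COLUMN = "table_name";

        static String selectModel(String physicalTable, String modelName, List<Object> parameters) {
            parameters.add(modelName);
            return "select * from " + physicalTable + " where " + TABLE_COLUMN + " = ?";
        }

        public static void main(String[] args) {
            final List<Object> parameters = new ArrayList<>();
            // Hypothetical physical table shared by all record models of that day.
            System.out.println(selectModel("records_all_20230317", "browser_error_log", parameters));
            System.out.println(parameters); // [browser_error_log]
        }
    }
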
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingDataDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingDataDAO.java
index 348b2b2448..efe96ec90f 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingDataDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingDataDAO.java
@@ -18,47 +18,74 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import lombok.AllArgsConstructor;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
+import org.apache.skywalking.oap.server.core.analysis.DownSampling;
+import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
 import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingDataRecord;
 import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingDataDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
 
-import java.io.IOException;
-import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.List;
 
-@AllArgsConstructor
+import static java.util.stream.Collectors.joining;
+
+@RequiredArgsConstructor
 public class JDBCEBPFProfilingDataDAO implements IEBPFProfilingDataDAO {
-    private JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
-    public List<EBPFProfilingDataRecord> queryData(List<String> scheduleIdList, long beginTime, long endTime) throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        final StringBuilder conditionSql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(scheduleIdList.size() + 2);
-        sql.append("select * from ").append(EBPFProfilingDataRecord.INDEX_NAME);
-
-        appendListCondition(conditionSql, condition, EBPFProfilingDataRecord.SCHEDULE_ID, scheduleIdList);
-        appendCondition(conditionSql, condition, EBPFProfilingDataRecord.UPLOAD_TIME, ">=", beginTime);
-        appendCondition(conditionSql, condition, EBPFProfilingDataRecord.UPLOAD_TIME, "<", endTime);
+    @SneakyThrows
+    public List<EBPFProfilingDataRecord> queryData(List<String> scheduleIdList, long beginTime, long endTime) {
+        final var tables = tableHelper.getTablesForRead(
+            EBPFProfilingDataRecord.INDEX_NAME,
+            TimeBucket.getTimeBucket(beginTime, DownSampling.Day),
+            TimeBucket.getTimeBucket(endTime, DownSampling.Day)
+        );
+        final var results = new ArrayList<EBPFProfilingDataRecord>();
 
-        if (conditionSql.length() > 0) {
-            sql.append(" where ").append(conditionSql);
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQL(scheduleIdList, beginTime, endTime, table);
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::buildDataList,
+                    sqlAndParameters.parameters()
+                )
+            );
         }
+        return results;
+    }
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                    connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return buildDataList(resultSet);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+    protected SQLAndParameters buildSQL(
+        final List<String> scheduleIdList,
+        final long beginTime,
+        final long endTime,
+        final String table) {
+        final var sql = new StringBuilder();
+        final var conditions = new StringBuilder();
+        final var parameters = new ArrayList<>(scheduleIdList.size() + 3);
+        sql.append("select * from ").append(table);
+        conditions.append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+        parameters.add(EBPFProfilingDataRecord.INDEX_NAME);
+
+        appendConditions(conditions, parameters, EBPFProfilingDataRecord.SCHEDULE_ID, scheduleIdList);
+        appendCondition(conditions, parameters, EBPFProfilingDataRecord.UPLOAD_TIME, ">=", beginTime);
+        appendCondition(conditions, parameters, EBPFProfilingDataRecord.UPLOAD_TIME, "<", endTime);
+
+        if (conditions.length() > 0) {
+            sql.append(conditions);
         }
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 
     private List<EBPFProfilingDataRecord> buildDataList(ResultSet resultSet) throws SQLException {
@@ -80,26 +107,20 @@ public class JDBCEBPFProfilingDataDAO implements IEBPFProfilingDataDAO {
         return dataList;
     }
 
-    private void appendCondition(StringBuilder conditionSql, List<Object> condition, String filed, String compare, Object data) {
+    private void appendCondition(StringBuilder conditionSql, List<Object> condition, String field, String compare, Object data) {
         if (conditionSql.length() > 0) {
             conditionSql.append(" and ");
         }
-        conditionSql.append(filed).append(compare).append("?");
+        conditionSql.append(field).append(compare).append("?");
         condition.add(data);
     }
 
-    private <T> void appendListCondition(StringBuilder conditionSql, List<Object> condition, String filed, List<T> data) {
+    private <T> void appendConditions(StringBuilder conditionSql, List<Object> condition, String field, List<T> data) {
         if (conditionSql.length() > 0) {
             conditionSql.append(" and ");
         }
-        conditionSql.append(filed).append(" in (");
-        for (int i = 0; i < data.size(); i++) {
-            if (i > 0) {
-                conditionSql.append(", ");
-            }
-            conditionSql.append("?");
-            condition.add(data.get(i));
-        }
-        conditionSql.append(")");
+        conditionSql.append(field).append(" in ")
+                    .append(data.stream().map(it -> "?").collect(joining(", ", "(", ")")));
+        condition.addAll(data);
     }
 }
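
appendConditions generates one "?" per element and joins them into an in-list, then adds all values as parameters in order. A standalone sketch of the same placeholder construction:

    import java.util.ArrayList;
    import java.util.List;

    import static java.util.stream.Collectors.joining;

    class InClauseSketch {
        static String inClause(String field, List<?> values, List<Object> parameters) {
            parameters.addAll(values);
            // One "?" per value: e.g. schedule_id in (?, ?, ?)
            return field + " in " + values.stream().map(it -> "?").collect(joining(", ", "(", ")"));
        }

        public static void main(String[] args) {
            final List<Object> parameters = new ArrayList<>();
            System.out.println(inClause("schedule_id", List.of("a", "b", "c"), parameters));
            System.out.println(parameters); // [a, b, c]
        }
    }
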
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingScheduleDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingScheduleDAO.java
index 269642c9d0..3520fad1bd 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingScheduleDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingScheduleDAO.java
@@ -18,55 +18,73 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import lombok.AllArgsConstructor;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingScheduleRecord;
 import org.apache.skywalking.oap.server.core.query.type.EBPFProfilingSchedule;
 import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingScheduleDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
-import org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.H2TableInstaller;
-import java.io.IOException;
-import java.sql.Connection;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCEntityConverters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 
-@AllArgsConstructor
+@RequiredArgsConstructor
 public class JDBCEBPFProfilingScheduleDAO implements IEBPFProfilingScheduleDAO {
-    private JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
-    public List<EBPFProfilingSchedule> querySchedules(String taskId) throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        final StringBuilder conditionSql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(4);
-        sql.append("select * from ").append(EBPFProfilingScheduleRecord.INDEX_NAME);
+    @SneakyThrows
+    public List<EBPFProfilingSchedule> querySchedules(String taskId) {
+        final var tables = tableHelper.getTablesWithinTTL(EBPFProfilingScheduleRecord.INDEX_NAME);
+        final var schedules = new ArrayList<EBPFProfilingSchedule>();
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQL(taskId, table);
+            schedules.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::buildSchedules,
+                    sqlAndParameters.parameters()
+                )
+            );
+        }
+        return schedules;
+    }
 
-        appendCondition(conditionSql, condition, EBPFProfilingScheduleRecord.TASK_ID, "=", taskId);
+    protected SQLAndParameters buildSQL(
+        final String taskId,
+        final String table) {
+        final var sql = new StringBuilder();
+        final var conditions = new StringBuilder();
+        final var parameters = new ArrayList<>(4);
+        sql.append("select * from ").append(table);
+        conditions.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+        parameters.add(EBPFProfilingScheduleRecord.INDEX_NAME);
 
-        if (conditionSql.length() > 0) {
-            sql.append(" where ").append(conditionSql);
-        }
+        appendCondition(conditions, parameters, EBPFProfilingScheduleRecord.TASK_ID, "=", taskId);
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                    connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return buildSchedules(resultSet);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+        if (conditions.length() > 0) {
+            sql.append(" where ").append(conditions);
         }
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 
     private List<EBPFProfilingSchedule> buildSchedules(ResultSet resultSet) throws SQLException {
         List<EBPFProfilingSchedule> schedules = new ArrayList<>();
         while (resultSet.next()) {
+            final var r = new EBPFProfilingScheduleRecord.Builder().storage2Entity(JDBCEntityConverters.toEntity(resultSet));
             EBPFProfilingSchedule schedule = new EBPFProfilingSchedule();
-            schedule.setScheduleId(resultSet.getString(H2TableInstaller.ID_COLUMN));
-            schedule.setTaskId(resultSet.getString(EBPFProfilingScheduleRecord.TASK_ID));
-            schedule.setProcessId(resultSet.getString(EBPFProfilingScheduleRecord.PROCESS_ID));
-            schedule.setStartTime(resultSet.getLong(EBPFProfilingScheduleRecord.START_TIME));
-            schedule.setEndTime(resultSet.getLong(EBPFProfilingScheduleRecord.END_TIME));
+            schedule.setScheduleId(r.getScheduleId());
+            schedule.setTaskId(r.getTaskId());
+            schedule.setProcessId(r.getProcessId());
+            schedule.setStartTime(r.getStartTime());
+            schedule.setEndTime(r.getEndTime());
 
             schedules.add(schedule);
         }
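
Queries with no explicit duration, like the schedule lookup above, go through getTablesWithinTTL: candidate tables are those whose day buckets fall in [today - TTL days, today]. A sketch of computing that range with java.time (the TTL value here is hypothetical; the real one comes from ConfigService):

    import java.time.LocalDate;
    import java.time.format.DateTimeFormatter;

    class TTLRangeSketch {
        public static void main(String[] args) {
            final int ttlDays = 7; // hypothetical; fetched from ConfigService in the real code
            final var today = LocalDate.now();
            final var fmt = DateTimeFormatter.BASIC_ISO_DATE;
            final long timeBucketEnd = Long.parseLong(today.format(fmt));
            final long timeBucketStart = Long.parseLong(today.minusDays(ttlDays).format(fmt));
            // Tables whose day-bucket suffix falls outside this range are skipped for reads.
            System.out.println(timeBucketStart + " .. " + timeBucketEnd);
        }
    }
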
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingTaskDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingTaskDAO.java
index 897a8d9ee7..33a7ec5ba9 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingTaskDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEBPFProfilingTaskDAO.java
@@ -19,7 +19,8 @@
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
 import com.google.gson.Gson;
-import lombok.AllArgsConstructor;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
 import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingTargetType;
 import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilingTaskRecord;
@@ -27,12 +28,12 @@ import org.apache.skywalking.oap.server.core.profiling.ebpf.storage.EBPFProfilin
 import org.apache.skywalking.oap.server.core.query.type.EBPFProfilingTask;
 import org.apache.skywalking.oap.server.core.query.type.EBPFProfilingTaskExtension;
 import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IEBPFProfilingTaskDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
-import org.apache.skywalking.oap.server.library.util.CollectionUtils;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
 
-import java.io.IOException;
-import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
@@ -40,103 +41,140 @@ import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
 
-@AllArgsConstructor
+@RequiredArgsConstructor
 public class JDBCEBPFProfilingTaskDAO implements IEBPFProfilingTaskDAO {
     private static final Gson GSON = new Gson();
-    private JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
-    public List<EBPFProfilingTask> queryTasksByServices(List<String> serviceIdList, long taskStartTime, long latestUpdateTime) throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>();
-        sql.append("select * from ").append(EBPFProfilingTaskRecord.INDEX_NAME);
+    @SneakyThrows
+    public List<EBPFProfilingTask> queryTasksByServices(List<String> serviceIdList, long taskStartTime, long latestUpdateTime) {
+        final var tables = tableHelper.getTablesWithinTTL(EBPFProfilingTaskRecord.INDEX_NAME);
+        final var results = new ArrayList<EBPFProfilingTask>();
+
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQLForQueryTasksByServices(serviceIdList, taskStartTime, latestUpdateTime, table);
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::buildTasks,
+                    sqlAndParameters.parameters()
+                )
+            );
+        }
+        return results;
+    }
+
+    protected SQLAndParameters buildSQLForQueryTasksByServices(
+        final List<String> serviceIdList,
+        final long taskStartTime,
+        final long latestUpdateTime,
+        final String table) {
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>();
+        sql.append("select * from ").append(table);
 
-        StringBuilder conditionSql = new StringBuilder();
+        final var conditionSql = new StringBuilder();
+        conditionSql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+        parameters.add(EBPFProfilingTaskRecord.INDEX_NAME);
 
-        appendListCondition(conditionSql, condition, EBPFProfilingTaskRecord.SERVICE_ID, serviceIdList);
+        appendListCondition(conditionSql, parameters, EBPFProfilingTaskRecord.SERVICE_ID, serviceIdList);
         if (taskStartTime > 0) {
-            appendCondition(conditionSql, condition,
+            appendCondition(conditionSql, parameters,
                 EBPFProfilingTaskRecord.START_TIME, ">=", taskStartTime);
         }
         if (latestUpdateTime > 0) {
-            appendCondition(conditionSql, condition,
+            appendCondition(conditionSql, parameters,
                 EBPFProfilingTaskRecord.LAST_UPDATE_TIME, ">", latestUpdateTime);
         }
 
         if (conditionSql.length() > 0) {
             sql.append(" where ").append(conditionSql);
         }
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return buildTasks(resultSet);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 
     @Override
-    public List<EBPFProfilingTask> queryTasksByTargets(String serviceId, String serviceInstanceId, List<EBPFProfilingTargetType> targetTypes, long taskStartTime, long latestUpdateTime) throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>();
-        sql.append("select * from ").append(EBPFProfilingTaskRecord.INDEX_NAME);
+    @SneakyThrows
+    public List<EBPFProfilingTask> queryTasksByTargets(String serviceId, String serviceInstanceId, List<EBPFProfilingTargetType> targetTypes, long taskStartTime, long latestUpdateTime) {
+        final var results = new ArrayList<EBPFProfilingTask>();
+        final var tables = tableHelper.getTablesWithinTTL(EBPFProfilingTaskRecord.INDEX_NAME);
+
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQLForQueryTasksByTargets(
+                serviceId, serviceInstanceId, targetTypes, taskStartTime, latestUpdateTime, table
+            );
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::buildTasks,
+                    sqlAndParameters.parameters()
+                )
+            );
+        }
+        return results;
+    }
 
-        StringBuilder conditionSql = new StringBuilder();
+    protected SQLAndParameters buildSQLForQueryTasksByTargets(
+        final String serviceId,
+        final String serviceInstanceId,
+        final List<EBPFProfilingTargetType> targetTypes,
+        final long taskStartTime,
+        final long latestUpdateTime,
+        final String table) {
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>();
+        final var conditions = new StringBuilder();
+
+        sql.append("select * from ").append(table);
+        conditions.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+        parameters.add(EBPFProfilingTaskRecord.INDEX_NAME);
 
         if (StringUtil.isNotEmpty(serviceId)) {
-            appendCondition(conditionSql, condition, EBPFProfilingTaskRecord.SERVICE_ID, serviceId);
+            appendCondition(conditions, parameters, EBPFProfilingTaskRecord.SERVICE_ID, serviceId);
         }
         if (StringUtil.isNotEmpty(serviceInstanceId)) {
-            appendCondition(conditionSql, condition, EBPFProfilingTaskRecord.INSTANCE_ID, serviceInstanceId);
+            appendCondition(conditions, parameters, EBPFProfilingTaskRecord.INSTANCE_ID, serviceInstanceId);
         }
-        appendListCondition(conditionSql, condition, EBPFProfilingTaskRecord.TARGET_TYPE, targetTypes.stream()
-            .map(EBPFProfilingTargetType::value).collect(Collectors.toList()));
+        appendListCondition(conditions, parameters, EBPFProfilingTaskRecord.TARGET_TYPE,
+            targetTypes.stream().map(EBPFProfilingTargetType::value).collect(Collectors.toList()));
         if (taskStartTime > 0) {
-            appendCondition(conditionSql, condition,
+            appendCondition(conditions, parameters,
                 EBPFProfilingTaskRecord.START_TIME, ">=", taskStartTime);
         }
         if (latestUpdateTime > 0) {
-            appendCondition(conditionSql, condition,
+            appendCondition(conditions, parameters,
                 EBPFProfilingTaskRecord.LAST_UPDATE_TIME, ">", latestUpdateTime);
         }
 
-        if (conditionSql.length() > 0) {
-            sql.append(" where ").append(conditionSql);
+        if (conditions.length() > 0) {
+            sql.append(" where ").append(conditions);
         }
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return buildTasks(resultSet);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 
     @Override
-    public EBPFProfilingTask queryById(String id) throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        sql.append("select * from ").append(EBPFProfilingTaskRecord.INDEX_NAME)
-            .append(" where ").append(EBPFProfilingTaskRecord.LOGICAL_ID).append("=?");
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(connection, sql.toString(), id)) {
-                final List<EBPFProfilingTask> tasks = buildTasks(resultSet);
-                if (CollectionUtils.isEmpty(tasks)) {
-                    return null;
-                }
-                EBPFProfilingTask result = tasks.get(0);
-                for (int i = 1; i < tasks.size(); i++) {
-                    result = result.combine(tasks.get(i));
-                }
+    @SneakyThrows
+    public EBPFProfilingTask queryById(String id) {
+        final var tables = tableHelper.getTablesWithinTTL(EBPFProfilingTaskRecord.INDEX_NAME);
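+        // Check each rolling table and return the first match; duplicated logical rows are combined in the reducer below.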
+        for (final var table : tables) {
+            final var sql = new StringBuilder();
+            sql.append("select * from ").append(table)
+               .append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?")
+               .append(" and ").append(EBPFProfilingTaskRecord.LOGICAL_ID).append(" = ?");
+
+            final var result = jdbcClient.executeQuery(
+                sql.toString(),
+                resultSet -> buildTasks(resultSet).stream().reduce(EBPFProfilingTask::combine).orElse(null),
+                EBPFProfilingTaskRecord.INDEX_NAME, id
+            );
+            if (result != null) {
                 return result;
             }
-        } catch (SQLException e) {
-            throw new IOException(e);
         }
+        return null;
     }
 
     private List<EBPFProfilingTask> buildTasks(ResultSet resultSet) throws SQLException {
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEventQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEventQueryDAO.java
index 34b5153d61..45c7dacd32 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEventQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCEventQueryDAO.java
@@ -20,15 +20,10 @@ package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
 import io.vavr.Tuple;
 import io.vavr.Tuple2;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.List;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.analysis.Layer;
+import org.apache.skywalking.oap.server.core.analysis.metrics.Event;
 import org.apache.skywalking.oap.server.core.query.PaginationUtils;
 import org.apache.skywalking.oap.server.core.query.enumeration.Order;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
@@ -36,91 +31,130 @@ import org.apache.skywalking.oap.server.core.query.type.event.EventQueryConditio
 import org.apache.skywalking.oap.server.core.query.type.event.EventType;
 import org.apache.skywalking.oap.server.core.query.type.event.Events;
 import org.apache.skywalking.oap.server.core.query.type.event.Source;
-import org.apache.skywalking.oap.server.core.analysis.metrics.Event;
 import org.apache.skywalking.oap.server.core.storage.query.IEventQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
 
-import static java.util.Objects.isNull;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Stream;
 
 import static com.google.common.base.Strings.isNullOrEmpty;
+import static java.util.Comparator.comparing;
+import static java.util.Objects.isNull;
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toList;
 
 @Slf4j
 @RequiredArgsConstructor
 public class JDBCEventQueryDAO implements IEventQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
     public Events queryEvents(final EventQueryCondition condition) throws Exception {
         final Tuple2<Stream<String>, Stream<Object>> conditionsParametersPair = buildQuery(condition);
         final Stream<String> conditions = conditionsParametersPair._1();
         final Object[] parameters = conditionsParametersPair._2().toArray();
-        final String whereClause = conditions.collect(Collectors.joining(" and ", " where ", ""));
+        final String whereClause = conditions.collect(joining(" and ", " where ", ""));
 
-        final Events result = new Events();
+        final var tables = tableHelper.getTablesWithinTTL(Event.INDEX_NAME);
+        final var queryOrder = isNull(condition.getOrder()) ? Order.DES : condition.getOrder();
+        final var page = PaginationUtils.INSTANCE.exchange(condition.getPaging());
+        final var events = new ArrayList<org.apache.skywalking.oap.server.core.query.type.event.Event>();
 
-        try (final Connection connection = jdbcClient.getConnection()) {
-            final Order queryOrder = isNull(condition.getOrder()) ? Order.DES : condition.getOrder();
-            final PaginationUtils.Page page = PaginationUtils.INSTANCE.exchange(condition.getPaging());
-            String sql = "select * from " + Event.INDEX_NAME + whereClause;
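+        // Collect a page worth of rows from every table, then merge, sort, and paginate in memory below.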
+        for (String table : tables) {
+            String sql = "select * from " + table + whereClause;
             if (Order.DES.equals(queryOrder)) {
                 sql += " order by " + Event.START_TIME + " desc";
             } else {
                 sql += " order by " + Event.START_TIME + " asc";
             }
-            sql += " limit " + page.getLimit() + " offset " + page.getFrom();
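+            // Over-fetch the first (from + limit) rows from each table so the merged set still contains the requested page; the offset is applied after merging.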
+            sql += " limit " + (page.getLimit() + page.getFrom());
             if (log.isDebugEnabled()) {
                 log.debug("Query SQL: {}, parameters: {}", sql, parameters);
             }
-            try (final ResultSet resultSet = jdbcClient.executeQuery(connection, sql, parameters)) {
+
+            jdbcClient.executeQuery(sql, resultSet -> {
                 while (resultSet.next()) {
-                    result.getEvents().add(parseResultSet(resultSet));
+                    events.add(parseResultSet(resultSet));
                 }
-            }
+                return null;
+            }, parameters);
         }
 
-        return result;
+        final var comparator = Order.DES.equals(queryOrder) ?
+            comparing(org.apache.skywalking.oap.server.core.query.type.event.Event::getStartTime).reversed() :
+            comparing(org.apache.skywalking.oap.server.core.query.type.event.Event::getStartTime);
+        return new Events(
+            events
+                .stream()
+                .sorted(comparator)
+                .skip(page.getFrom())
+                .limit(page.getLimit())
+                .collect(toList())
+        );
     }
 
     @Override
     public Events queryEvents(List<EventQueryCondition> conditions) throws Exception {
-        final List<Tuple2<Stream<String>, Stream<Object>>> conditionsParametersPair = conditions.stream()
-                                                                                                .map(this::buildQuery)
-                                                                                                .collect(Collectors.toList());
-        final Object[] parameters = conditionsParametersPair.stream()
-                                                            .map(Tuple2::_2)
-                                                            .reduce(Stream.empty(), Stream::concat)
-                                                            .toArray();
-        final String whereClause = conditionsParametersPair.stream()
-                                                       .map(Tuple2::_1)
-                                                       .map(it -> it.collect(Collectors.joining(" and ")))
-                                                       .collect(Collectors.joining(" or ", " where ", ""));
-
-        final Events result = new Events();
-        try (final Connection connection = jdbcClient.getConnection()) {
-            EventQueryCondition condition = conditions.get(0);
-            final Order queryOrder = isNull(condition.getOrder()) ? Order.DES : condition.getOrder();
-            final PaginationUtils.Page page = PaginationUtils.INSTANCE.exchange(condition.getPaging());
-            String sql = "select * from " + Event.INDEX_NAME + whereClause;
+        final var conditionsParametersPair = conditions.stream()
+                                                       .map(this::buildQuery)
+                                                       .collect(toList());
+        final var parameters = conditionsParametersPair.stream()
+                                                       .map(Tuple2::_2)
+                                                       .reduce(Stream.empty(), Stream::concat)
+                                                       .toArray();
+        final var whereClause = conditionsParametersPair.stream()
+                                                        .map(Tuple2::_1)
+                                                        .map(it -> it.collect(joining(" and ")))
+                                                        .collect(joining(" or ", " where ", ""));
+
+        EventQueryCondition condition = conditions.get(0);
+        final Order queryOrder = isNull(condition.getOrder()) ? Order.DES : condition.getOrder();
+        final PaginationUtils.Page page = PaginationUtils.INSTANCE.exchange(condition.getPaging());
+
+        final var tables = tableHelper.getTablesWithinTTL(Event.INDEX_NAME);
+        final var events = new ArrayList<org.apache.skywalking.oap.server.core.query.type.event.Event>();
+
+        for (String table : tables) {
+            String sql = "select * from " + table + whereClause;
             if (Order.DES.equals(queryOrder)) {
                 sql += " order by " + Event.START_TIME + " desc";
             } else {
                 sql += " order by " + Event.START_TIME + " asc";
             }
-            sql += " limit " + page.getLimit() + " offset " + page.getFrom();
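+            // Same over-fetch as in the single-condition query: take (from + limit) rows per table and apply the offset after merging.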
+            sql += " limit " + (page.getLimit() + page.getFrom());
             if (log.isDebugEnabled()) {
                 log.debug("Query SQL: {}, parameters: {}", sql, parameters);
             }
-            try (final ResultSet resultSet = jdbcClient.executeQuery(connection, sql, parameters)) {
+            jdbcClient.executeQuery(sql, resultSet -> {
                 while (resultSet.next()) {
-                    result.getEvents().add(parseResultSet(resultSet));
+                    events.add(parseResultSet(resultSet));
                 }
-            }
+
+                return null;
+            }, parameters);
         }
-        return result;
+
+        final var comparator = Order.DES.equals(queryOrder) ?
+            comparing(org.apache.skywalking.oap.server.core.query.type.event.Event::getStartTime).reversed() :
+            comparing(org.apache.skywalking.oap.server.core.query.type.event.Event::getStartTime);
+        return new Events(
+            events
+                .stream()
+                .sorted(comparator)
+                .skip(page.getFrom())
+                .limit(page.getLimit())
+                .collect(toList())
+        );
     }
 
     protected org.apache.skywalking.oap.server.core.query.type.event.Event parseResultSet(final ResultSet resultSet) throws SQLException {
-        final org.apache.skywalking.oap.server.core.query.type.event.Event event = new org.apache.skywalking.oap.server.core.query.type.event.Event();
+        final var event = new org.apache.skywalking.oap.server.core.query.type.event.Event();
 
         event.setUuid(resultSet.getString(Event.UUID));
 
@@ -143,6 +177,9 @@ public class JDBCEventQueryDAO implements IEventQueryDAO {
         final Stream.Builder<String> conditions = Stream.builder();
         final Stream.Builder<Object> parameters = Stream.builder();
 
+        conditions.add(JDBCTableInstaller.TABLE_COLUMN + " = ?");
+        parameters.add(Event.INDEX_NAME);
+
         if (!isNullOrEmpty(condition.getUuid())) {
             conditions.add(Event.UUID + "=?");
             parameters.add(condition.getUuid());
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCHistoryDeleteDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCHistoryDeleteDAO.java
index 7a2124e4f4..fe174f977b 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCHistoryDeleteDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCHistoryDeleteDAO.java
@@ -18,66 +18,93 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.skywalking.oap.server.core.analysis.DownSampling;
+import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
 import org.apache.skywalking.oap.server.core.storage.IHistoryDeleteDAO;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
-import org.apache.skywalking.oap.server.core.storage.model.SQLDatabaseModelExtension;
-import org.apache.skywalking.oap.server.library.client.jdbc.JDBCClientException;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLBuilder;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
 import org.joda.time.DateTime;
-import lombok.RequiredArgsConstructor;
 
+import java.time.Clock;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+
+@Slf4j
 @RequiredArgsConstructor
 public class JDBCHistoryDeleteDAO implements IHistoryDeleteDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
+    private final JDBCTableInstaller modelInstaller;
+    private final Clock clock;
+
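+    // Tracks the last successfully deleted deadline per model, so repeated delete cycles within the same day become no-ops.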
+    private final Map<String, Long> lastDeletedTimeBucket = new ConcurrentHashMap<>();
 
     @Override
-    public void deleteHistory(Model model, String timeBucketColumnName, int ttl) throws IOException {
-        SQLBuilder dataDeleteSQL = new SQLBuilder("delete from " + model.getName() + " where ")
-            .append(timeBucketColumnName).append("<= ? ")
-            .append(" and ")
-            .append(timeBucketColumnName).append(">= ? ");
+    @SneakyThrows
+    public void deleteHistory(Model model, String timeBucketColumnName, int ttl) {
+        final var endTimeBucket = TimeBucket.getTimeBucket(clock.millis(), DownSampling.Day);
+        final var startTimeBucket = Long.parseLong(new DateTime(clock.millis()).minusDays(ttl).toString("yyyyMMdd"));
+        log.info(
+            "Deleting history data, ttl: {}, now: {}. Keep [{}, {}]",
+            ttl,
+            clock.millis(),
+            startTimeBucket,
+            endTimeBucket
+        );
+
+        final var deadline = Long.parseLong(new DateTime(clock.millis()).minusDays(ttl).toString("yyyyMMdd"));
+        final var lastSuccessDeadline = lastDeletedTimeBucket.getOrDefault(model.getName(), 0L);
+        if (deadline <= lastSuccessDeadline) {
+            if (log.isDebugEnabled()) {
+                log.debug(
+                    "The deadline {} is less than the last success deadline {}, skip deleting history data",
+                    deadline,
+                    lastSuccessDeadline
+                );
+            }
+            return;
+        }
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            long deadline;
-            long minTime;
-            if (model.isRecord()) {
-                deadline = Long.parseLong(new DateTime().plusDays(-ttl).toString("yyyyMMddHHmmss"));
-                minTime = 1000_00_00_00_00_00L;
-            } else {
-                switch (model.getDownsampling()) {
-                    case Minute:
-                        deadline = Long.parseLong(new DateTime().plusDays(-ttl).toString("yyyyMMddHHmm"));
-                        minTime = 1000_00_00_00_00L;
-                        break;
-                    case Hour:
-                        deadline = Long.parseLong(new DateTime().plusDays(-ttl).toString("yyyyMMddHH"));
-                        minTime = 1000_00_00_00L;
-                        break;
-                    case Day:
-                        deadline = Long.parseLong(new DateTime().plusDays(-ttl).toString("yyyyMMdd"));
-                        minTime = 1000_00_00L;
-                        break;
-                    default:
-                        return;
-                }
+        final var ttlTables = tableHelper.getTablesForRead(model.getName(), startTimeBucket, endTimeBucket);
+        final var tablesToDrop = new HashSet<String>();
+
+        try (final var conn = jdbcClient.getConnection();
+             final var result = conn.getMetaData().getTables(null, null, TableHelper.getTableName(model) + "%", new String[]{"TABLE"})) {
+            while (result.next()) {
+                tablesToDrop.add(result.getString("TABLE_NAME"));
             }
-            jdbcClient.executeUpdate(connection, dataDeleteSQL.toString(), deadline, minTime);
-            // Delete additional tables
-            for (SQLDatabaseModelExtension.AdditionalTable additionalTable : model.getSqlDBModelExtension()
-                                                                                  .getAdditionalTables()
-                                                                                  .values()) {
-                SQLBuilder additionalTableDeleteSQL = new SQLBuilder("delete from " + additionalTable.getName() + " where ")
-                    .append(timeBucketColumnName).append("<= ? ")
-                    .append(" and ")
-                    .append(timeBucketColumnName).append(">= ? ");
-                jdbcClient.executeUpdate(connection, additionalTableDeleteSQL.toString(), deadline, minTime);
+        }
+
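+        // Keep the tables still inside the TTL window and only drop the day-suffixed (_yyyyMMdd) ones.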
+        ttlTables.forEach(tablesToDrop::remove);
+        tablesToDrop.removeIf(it -> !it.matches(".*_\\d{8}$"));
+        for (final var table : tablesToDrop) {
+            final var dropSql = new SQLBuilder("drop table if exists ").append(table);
+            jdbcClient.executeUpdate(dropSql.toString());
+        }
+
+        // Drop additional tables
+        for (final var table : tablesToDrop) {
+            final var timeBucket = TableHelper.getTimeBucket(table);
+            for (final var additionalTable : model.getSqlDBModelExtension().getAdditionalTables().values()) {
+                final var additionalTableToDrop = TableHelper.getTable(additionalTable.getName(), timeBucket);
+                final var dropSql = new SQLBuilder("drop table if exists ").append(additionalTableToDrop);
+                jdbcClient.executeUpdate(dropSql.toString());
             }
-        } catch (JDBCClientException | SQLException e) {
-            throw new IOException(e.getMessage(), e);
         }
+
+        // Create tables for the next day.
+        final var nextTimeBucket = TimeBucket.getTimeBucket(clock.millis() + TimeUnit.DAYS.toMillis(1), DownSampling.Day);
+        final var table = TableHelper.getTable(model, nextTimeBucket);
+        modelInstaller.createTable(model, table);
+
+        lastDeletedTimeBucket.put(model.getName(), deadline);
     }
 }
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCLogQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCLogQueryDAO.java
index 6a57587565..52dc2cbc96 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCLogQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCLogQueryDAO.java
@@ -20,15 +20,8 @@ package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
 import com.google.common.base.Strings;
 import lombok.RequiredArgsConstructor;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import org.apache.skywalking.oap.server.core.query.input.Duration;
-import org.apache.skywalking.oap.server.library.util.StringUtil;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.CoreModule;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
@@ -37,15 +30,33 @@ import org.apache.skywalking.oap.server.core.analysis.manual.log.LogRecord;
 import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
 import org.apache.skywalking.oap.server.core.config.ConfigService;
 import org.apache.skywalking.oap.server.core.query.enumeration.Order;
+import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.query.input.TraceScopeCondition;
 import org.apache.skywalking.oap.server.core.query.type.ContentType;
 import org.apache.skywalking.oap.server.core.query.type.Log;
 import org.apache.skywalking.oap.server.core.query.type.Logs;
 import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.module.ModuleManager;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
+import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import static java.util.Comparator.comparing;
 import static java.util.Objects.nonNull;
+import static java.util.function.Predicate.not;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toSet;
 import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.CONTENT;
 import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.CONTENT_TYPE;
 import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.ENDPOINT_ID;
@@ -56,15 +67,17 @@ import static org.apache.skywalking.oap.server.core.analysis.manual.log.Abstract
 import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TIMESTAMP;
 import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TRACE_ID;
 import static org.apache.skywalking.oap.server.core.analysis.manual.log.AbstractLogRecord.TRACE_SEGMENT_ID;
-import static org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.H2TableInstaller.ID_COLUMN;
 
+@Slf4j
 @RequiredArgsConstructor
 public class JDBCLogQueryDAO implements ILogQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
     private final ModuleManager manager;
-    private List<String> searchableTagKeys;
+    private final TableHelper tableHelper;
+    private Set<String> searchableTagKeys;
 
     @Override
+    @SneakyThrows
     public Logs queryLogs(String serviceId,
                           String serviceInstanceId,
                           String endpointId,
@@ -75,46 +88,127 @@ public class JDBCLogQueryDAO implements ILogQueryDAO {
                           final Duration duration,
                           final List<Tag> tags,
                           final List<String> keywordsOfContent,
-                          final List<String> excludingKeywordsOfContent) throws IOException {
+                          final List<String> excludingKeywordsOfContent) {
+        if (searchableTagKeys == null) {
+            final ConfigService configService = manager.find(CoreModule.NAME)
+                                                       .provider()
+                                                       .getService(ConfigService.class);
+            searchableTagKeys = new HashSet<>(Arrays.asList(configService.getSearchableLogsTags().split(Const.COMMA)));
+        }
+        if (tags != null && !searchableTagKeys.containsAll(tags.stream().map(Tag::getKey).collect(toSet()))) {
+            log.warn(
+                "Searching tags that are not searchable: {}",
+                tags.stream().map(Tag::getKey).filter(not(searchableTagKeys::contains)).collect(toSet()));
+            return new Logs();
+        }
+
+        final var tables = tableHelper.getTablesForRead(
+            LogRecord.INDEX_NAME,
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+        final var logs = new ArrayList<Log>();
+
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQL(
+                serviceId, serviceInstanceId, endpointId, relatedTrace, queryOrder,
+                from, limit, duration, tags, keywordsOfContent, excludingKeywordsOfContent, table);
+
+            logs.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::parseResults,
+                    sqlAndParameters.parameters()
+                )
+            );
+        }
+        final var comparator = Order.DES.equals(queryOrder) ?
+            comparing(Log::getTimestamp).reversed() :
+            comparing(Log::getTimestamp);
+
+        return new Logs(
+            logs
+                .stream()
+                .sorted(comparator)
+                .skip(from)
+                .limit(limit)
+                .collect(toList())
+        );
+    }
+
+    protected ArrayList<Log> parseResults(ResultSet resultSet) throws SQLException {
+        final var logs = new ArrayList<Log>();
+        while (resultSet.next()) {
+            Log log = new Log();
+            log.setServiceId(resultSet.getString(SERVICE_ID));
+            log.setServiceInstanceId(resultSet.getString(SERVICE_INSTANCE_ID));
+            log.setEndpointId(resultSet.getString(ENDPOINT_ID));
+            if (log.getEndpointId() != null) {
+                log.setEndpointName(IDManager.EndpointID.analysisId(log.getEndpointId()).getEndpointName());
+            }
+            log.setTraceId(resultSet.getString(TRACE_ID));
+            log.setTimestamp(resultSet.getLong(TIMESTAMP));
+            log.setContentType(ContentType.instanceOf(resultSet.getInt(CONTENT_TYPE)));
+            log.setContent(resultSet.getString(CONTENT));
+            String dataBinaryBase64 = resultSet.getString(TAGS_RAW_DATA);
+            if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
+                parserDataBinary(dataBinaryBase64, log.getTags());
+            }
+            logs.add(log);
+        }
+
+        return logs;
+    }
+
+    protected SQLAndParameters buildSQL(
+        String serviceId,
+        String serviceInstanceId,
+        String endpointId,
+        TraceScopeCondition relatedTrace,
+        Order queryOrder,
+        int from,
+        int limit,
+        final Duration duration,
+        final List<Tag> tags,
+        final List<String> keywordsOfContent,
+        final List<String> excludingKeywordsOfContent,
+        final String table) {
         long startSecondTB = 0;
         long endSecondTB = 0;
         if (nonNull(duration)) {
             startSecondTB = duration.getStartTimeBucketInSec();
             endSecondTB = duration.getEndTimeBucketInSec();
         }
-        if (searchableTagKeys == null) {
-            final ConfigService configService = manager.find(CoreModule.NAME)
-                                                       .provider()
-                                                       .getService(ConfigService.class);
-            searchableTagKeys = Arrays.asList(configService.getSearchableLogsTags().split(Const.COMMA));
-        }
         StringBuilder sql = new StringBuilder();
         List<Object> parameters = new ArrayList<>(10);
 
-        sql.append("from ").append(LogRecord.INDEX_NAME);
-        /**
+        sql.append("select * from ").append(table);
+        /*
          * This is an AdditionalEntity feature, see:
          * {@link org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase.AdditionalEntity}
          */
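+        // The additional tag table rolls per day as well, so its physical name is derived from this table's time bucket.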
+        final var timeBucket = TableHelper.getTimeBucket(table);
+        final var tagTable = TableHelper.getTable(AbstractLogRecord.ADDITIONAL_TAG_TABLE, timeBucket);
         if (!CollectionUtils.isEmpty(tags)) {
             for (int i = 0; i < tags.size(); i++) {
-                sql.append(" inner join ").append(AbstractLogRecord.ADDITIONAL_TAG_TABLE).append(" ");
-                sql.append(AbstractLogRecord.ADDITIONAL_TAG_TABLE + i);
-                sql.append(" on ").append(LogRecord.INDEX_NAME).append(".").append(ID_COLUMN).append(" = ");
-                sql.append(AbstractLogRecord.ADDITIONAL_TAG_TABLE + i).append(".").append(ID_COLUMN);
+                sql.append(" inner join ").append(tagTable).append(" ");
+                sql.append(tagTable + i);
+                sql.append(" on ").append(table).append(".").append(JDBCTableInstaller.ID_COLUMN).append(" = ");
+                sql.append(tagTable + i).append(".").append(JDBCTableInstaller.ID_COLUMN);
             }
         }
         sql.append(" where ");
-        sql.append(" 1=1 ");
+        sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+        parameters.add(LogRecord.INDEX_NAME);
         if (startSecondTB != 0 && endSecondTB != 0) {
-            sql.append(" and ").append(LogRecord.INDEX_NAME).append(".").append(AbstractLogRecord.TIME_BUCKET).append(" >= ?");
+            sql.append(" and ").append(table).append(".").append(AbstractLogRecord.TIME_BUCKET).append(" >= ?");
             parameters.add(startSecondTB);
-            sql.append(" and ").append(LogRecord.INDEX_NAME).append(".").append(AbstractLogRecord.TIME_BUCKET).append(" <= ?");
+            sql.append(" and ").append(table).append(".").append(AbstractLogRecord.TIME_BUCKET).append(" <= ?");
             parameters.add(endSecondTB);
         }
 
         if (StringUtil.isNotEmpty(serviceId)) {
-            sql.append(" and ").append(LogRecord.INDEX_NAME).append(".").append(SERVICE_ID).append(" = ?");
+            sql.append(" and ").append(table).append(".").append(SERVICE_ID).append(" = ?");
             parameters.add(serviceId);
         }
         if (StringUtil.isNotEmpty(serviceInstanceId)) {
@@ -142,58 +236,18 @@ public class JDBCLogQueryDAO implements ILogQueryDAO {
 
         if (CollectionUtils.isNotEmpty(tags)) {
             for (int i = 0; i < tags.size(); i++) {
-                final int foundIdx = searchableTagKeys.indexOf(tags.get(i).getKey());
-                if (foundIdx > -1) {
-                    sql.append(" and ").append(AbstractLogRecord.ADDITIONAL_TAG_TABLE + i).append(".");
-                    sql.append(AbstractLogRecord.TAGS).append(" = ?");
-                    parameters.add(tags.get(i).toString());
-                } else {
-                    //If the tag is not searchable, but is required, then don't need to run the real query.
-                    return new Logs();
-                }
+                sql.append(" and ").append(tagTable + i).append(".");
+                sql.append(AbstractLogRecord.TAGS).append(" = ?");
+                parameters.add(tags.get(i).toString());
             }
         }
-
         sql.append(" order by ")
            .append(TIMESTAMP)
            .append(" ")
            .append(Order.DES.equals(queryOrder) ? "desc" : "asc");
 
-        Logs logs = new Logs();
-        try (Connection connection = jdbcClient.getConnection()) {
-
-            buildLimit(sql, from, limit);
-
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, "select * " + sql.toString(), parameters.toArray(new Object[0]))) {
-                while (resultSet.next()) {
-                    Log log = new Log();
-                    log.setServiceId(resultSet.getString(SERVICE_ID));
-                    log.setServiceInstanceId(resultSet.getString(SERVICE_INSTANCE_ID));
-                    log.setEndpointId(resultSet.getString(ENDPOINT_ID));
-                    if (log.getEndpointId() != null) {
-                        log.setEndpointName(IDManager.EndpointID.analysisId(log.getEndpointId()).getEndpointName());
-                    }
-                    log.setTraceId(resultSet.getString(TRACE_ID));
-                    log.setTimestamp(resultSet.getLong(TIMESTAMP));
-                    log.setContentType(ContentType.instanceOf(resultSet.getInt(CONTENT_TYPE)));
-                    log.setContent(resultSet.getString(CONTENT));
-                    String dataBinaryBase64 = resultSet.getString(TAGS_RAW_DATA);
-                    if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
-                        parserDataBinary(dataBinaryBase64, log.getTags());
-                    }
-                    logs.getLogs().add(log);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
-
-        return logs;
-    }
+        sql.append(" limit ").append(from + limit);
 
-    protected void buildLimit(StringBuilder sql, int from, int limit) {
-        sql.append(" LIMIT ").append(limit);
-        sql.append(" OFFSET ").append(from);
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 }
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCManagementDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCManagementDAO.java
index 1a335a6d61..b19e899ad1 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCManagementDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCManagementDAO.java
@@ -18,25 +18,26 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
+import lombok.RequiredArgsConstructor;
 import org.apache.skywalking.oap.server.core.analysis.management.ManagementData;
 import org.apache.skywalking.oap.server.core.storage.IManagementDAO;
 import org.apache.skywalking.oap.server.core.storage.StorageData;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.core.storage.type.HashMapConverter;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
-import lombok.RequiredArgsConstructor;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
 
 /**
  * Synchronize storage H2 implements
  */
 @RequiredArgsConstructor
 public class JDBCManagementDAO extends JDBCSQLExecutor implements IManagementDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
     private final StorageBuilder<ManagementData> storageBuilder;
 
     @Override
@@ -47,7 +48,7 @@ public class JDBCManagementDAO extends JDBCSQLExecutor implements IManagementDAO
                 return;
             }
 
-            SQLExecutor insertExecutor = getInsertExecutor(model.getName(), storageData, storageBuilder,
+            SQLExecutor insertExecutor = getInsertExecutor(model, storageData, 0, storageBuilder,
                                                            new HashMapConverter.ToStorage(), null);
             insertExecutor.invoke(connection);
         } catch (IOException | SQLException e) {
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetadataQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetadataQueryDAO.java
index cc3647f2c5..9db398be28 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetadataQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetadataQueryDAO.java
@@ -22,15 +22,8 @@ import com.google.common.base.Strings;
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-import lombok.RequiredArgsConstructor;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
+import lombok.SneakyThrows;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
 import org.apache.skywalking.oap.server.core.analysis.Layer;
 import org.apache.skywalking.oap.server.core.analysis.TimeBucket;
@@ -48,249 +41,406 @@ import org.apache.skywalking.oap.server.core.query.type.Process;
 import org.apache.skywalking.oap.server.core.query.type.Service;
 import org.apache.skywalking.oap.server.core.query.type.ServiceInstance;
 import org.apache.skywalking.oap.server.core.storage.query.IMetadataQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.library.module.ModuleManager;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
-import org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.H2TableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCEntityConverters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static java.util.stream.Collectors.toList;
+import static org.apache.skywalking.oap.server.core.analysis.manual.instance.InstanceTraffic.PropertyUtil.LANGUAGE;
 
-@RequiredArgsConstructor
 public class JDBCMetadataQueryDAO implements IMetadataQueryDAO {
     private static final Gson GSON = new Gson();
 
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
     private final int metadataQueryMaxSize;
+    private final TableHelper tableHelper;
+
+    public JDBCMetadataQueryDAO(JDBCClient jdbcClient, int metadataQueryMaxSize, ModuleManager moduleManager) {
+        this.jdbcClient = jdbcClient;
+        this.metadataQueryMaxSize = metadataQueryMaxSize;
+        this.tableHelper = new TableHelper(moduleManager, jdbcClient);
+    }
 
     @Override
-    public List<Service> listServices(final String layer, final String group) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(5);
-        sql.append("select * from ").append(ServiceTraffic.INDEX_NAME);
-        if (StringUtil.isNotEmpty(layer) || StringUtil.isNotEmpty(group)) {
-            sql.append(" where ");
+    @SneakyThrows
+    public List<Service> listServices(final String layer, final String group) {
+        final var results = new ArrayList<Service>();
+        final var tables = tableHelper.getTablesWithinTTL(ServiceTraffic.INDEX_NAME);
+
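+        // Union the rows from all rolling tables; the overall size cap is applied once more after merging.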
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQLForListServices(layer, group, table);
+            results.addAll(jdbcClient.executeQuery(
+                sqlAndParameters.sql(),
+                this::buildServices,
+                sqlAndParameters.parameters())
+            );
         }
+        return results
+            .stream()
+            .limit(metadataQueryMaxSize)
+            .collect(toList());
+    }
+
+    protected SQLAndParameters buildSQLForListServices(String layer, String group, String table) {
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>(5);
+        sql.append("select * from ").append(table)
+           .append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+        parameters.add(ServiceTraffic.INDEX_NAME);
 
         if (StringUtil.isNotEmpty(layer)) {
-            sql.append(ServiceTraffic.LAYER).append("=?");
-            condition.add(Layer.valueOf(layer).value());
-        }
-        if (StringUtil.isNotEmpty(layer) && StringUtil.isNotEmpty(group)) {
-            sql.append(" and ");
+            sql.append(" and ").append(ServiceTraffic.LAYER).append(" = ?");
+            parameters.add(Layer.valueOf(layer).value());
         }
         if (StringUtil.isNotEmpty(group)) {
-            sql.append(ServiceTraffic.GROUP).append("=?");
-            condition.add(group);
+            sql.append(" and ").append(ServiceTraffic.GROUP).append(" = ?");
+            parameters.add(group);
         }
 
         sql.append(" limit ").append(metadataQueryMaxSize);
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return buildServices(resultSet);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 
     @Override
-    public List<Service> getServices(final String serviceId) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(5);
-        sql.append("select * from ").append(ServiceTraffic.INDEX_NAME).append(" where ");
-        sql.append(ServiceTraffic.SERVICE_ID).append(" = ?");
-        condition.add(serviceId);
-        sql.append(" limit ").append(metadataQueryMaxSize);
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]));
-            return buildServices(resultSet);
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @SneakyThrows
+    public List<Service> getServices(final String serviceId) {
+        final var tables = tableHelper.getTablesWithinTTL(ServiceTraffic.INDEX_NAME);
+        final var results = new ArrayList<Service>();
+
+        for (String table : tables) {
+            final SQLAndParameters sqlAndParameters = buildSQLForGetServices(serviceId, table);
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::buildServices,
+                    sqlAndParameters.parameters()
+                )
+            );
         }
+        return results
+            .stream()
+            .limit(metadataQueryMaxSize)
+            .collect(toList());
     }
 
-    @Override
-    public List<ServiceInstance> listInstances(Duration duration,
-                                               String serviceId) throws IOException {
-        final long minuteTimeBucket = TimeBucket.getMinuteTimeBucket(duration.getStartTimestamp());
-
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(5);
-        sql.append("select * from ").append(InstanceTraffic.INDEX_NAME).append(" where ");
-        sql.append(InstanceTraffic.LAST_PING_TIME_BUCKET).append(" >= ?");
-        condition.add(minuteTimeBucket);
-        sql.append(" and ").append(InstanceTraffic.SERVICE_ID).append("=?");
-        condition.add(serviceId);
+    protected SQLAndParameters buildSQLForGetServices(String serviceId, String table) {
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>(5);
+        sql.append("select * from ").append(table)
+           .append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?")
+           .append(" and ").append(ServiceTraffic.SERVICE_ID).append(" = ?");
+        parameters.add(ServiceTraffic.INDEX_NAME);
+        parameters.add(serviceId);
         sql.append(" limit ").append(metadataQueryMaxSize);
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]));
-            return buildInstances(resultSet);
+        return new SQLAndParameters(sql.toString(), parameters);
+    }
 
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @Override
+    @SneakyThrows
+    public List<ServiceInstance> listInstances(Duration duration,
+                                               String serviceId) {
+        final var results = new ArrayList<ServiceInstance>();
+
+        final var minuteTimeBucket = TimeBucket.getMinuteTimeBucket(duration.getStartTimestamp());
+
+        final var tables = tableHelper.getTablesForRead(
+            InstanceTraffic.INDEX_NAME,
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+
+        for (String table : tables) {
+            final var sqlAndParameters = buildSQLForListInstances(serviceId, minuteTimeBucket, table);
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::buildInstances,
+                    sqlAndParameters.parameters()
+                )
+            );
         }
+
+        return results
+            .stream()
+            .limit(metadataQueryMaxSize)
+            .collect(toList());
     }
 
-    @Override
-    public ServiceInstance getInstance(final String instanceId) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(5);
-        sql.append("select * from ").append(InstanceTraffic.INDEX_NAME).append(" where ");
-        sql.append(H2TableInstaller.ID_COLUMN).append(" = ?");
-        condition.add(instanceId);
+    protected SQLAndParameters buildSQLForListInstances(String serviceId, long minuteTimeBucket, String table) {
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>(5);
+        sql.append("select * from ").append(table).append(" where ")
+            .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+        parameters.add(InstanceTraffic.INDEX_NAME);
+        sql.append(" and ").append(InstanceTraffic.LAST_PING_TIME_BUCKET).append(" >= ?");
+        parameters.add(minuteTimeBucket);
+        sql.append(" and ").append(InstanceTraffic.SERVICE_ID).append("=?");
+        parameters.add(serviceId);
         sql.append(" limit ").append(metadataQueryMaxSize);
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]));
-            final List<ServiceInstance> instances = buildInstances(resultSet);
-            return instances.size() > 0 ? instances.get(0) : null;
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 
     @Override
-    public List<Endpoint> findEndpoint(String keyword, String serviceId, int limit) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(5);
-        sql.append("select * from ").append(EndpointTraffic.INDEX_NAME).append(" where ");
-        sql.append(EndpointTraffic.SERVICE_ID).append("=?");
-        condition.add(serviceId);
-        if (!Strings.isNullOrEmpty(keyword)) {
-            sql.append(" and ").append(EndpointTraffic.NAME).append(" like concat('%',?,'%') ");
-            condition.add(keyword);
-        }
-        sql.append(" limit ").append(limit);
-
-        List<Endpoint> endpoints = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-
-                while (resultSet.next()) {
-                    Endpoint endpoint = new Endpoint();
-                    endpoint.setId(resultSet.getString(H2TableInstaller.ID_COLUMN));
-                    endpoint.setName(resultSet.getString(EndpointTraffic.NAME));
-                    endpoints.add(endpoint);
-                }
+    @SneakyThrows
+    public ServiceInstance getInstance(final String instanceId) {
+        final var tables = tableHelper.getTablesWithinTTL(InstanceTraffic.INDEX_NAME);
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(5);
+            sql.append("select * from ").append(table).append(" where ")
+                .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            condition.add(InstanceTraffic.INDEX_NAME);
+            sql.append(" and ").append(JDBCTableInstaller.ID_COLUMN).append(" = ?");
+            condition.add(instanceId);
+            sql.append(" limit ").append(metadataQueryMaxSize);
+
+            final var result = jdbcClient.executeQuery(sql.toString(), resultSet -> {
+                final List<ServiceInstance> instances = buildInstances(resultSet);
+                return instances.size() > 0 ? instances.get(0) : null;
+            }, condition.toArray(new Object[0]));
+            if (result != null) {
+                return result;
             }
-        } catch (SQLException e) {
-            throw new IOException(e);
         }
-        return endpoints;
+
+        return null;
     }
 
     @Override
-    public List<Process> listProcesses(String serviceId, ProfilingSupportStatus supportStatus, long lastPingStartTimeBucket, long lastPingEndTimeBucket) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>();
-        sql.append("select * from ").append(ProcessTraffic.INDEX_NAME);
-        appendProcessWhereQuery(sql, condition, serviceId, null, null, supportStatus, lastPingStartTimeBucket, lastPingEndTimeBucket, false);
-        sql.append(" limit ").append(metadataQueryMaxSize);
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return buildProcesses(resultSet);
+    @SneakyThrows
+    public List<Endpoint> findEndpoint(String keyword, String serviceId, int limit) {
+        final var results = new ArrayList<Endpoint>();
+        final var tables = tableHelper.getTablesWithinTTL(EndpointTraffic.INDEX_NAME);
+
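+        // Query each rolled table; the per-table limit bounds intermediate
+        // results and the final stream().limit(limit) enforces the overall cap.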
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(5);
+            sql.append("select * from ").append(table).append(" where ")
+                .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            condition.add(EndpointTraffic.INDEX_NAME);
+            sql.append(" and ").append(EndpointTraffic.SERVICE_ID).append("=?");
+            condition.add(serviceId);
+            sql.append(" and ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            condition.add(EndpointTraffic.INDEX_NAME);
+            if (!Strings.isNullOrEmpty(keyword)) {
+                sql.append(" and ").append(EndpointTraffic.NAME).append(" like concat('%',?,'%') ");
+                condition.add(keyword);
             }
-        } catch (SQLException e) {
-            throw new IOException(e);
+            sql.append(" limit ").append(limit);
+
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sql.toString(), resultSet -> {
+                        List<Endpoint> endpoints = new ArrayList<>();
+
+                        while (resultSet.next()) {
+                            final var endpointTraffic = new EndpointTraffic.Builder().storage2Entity(JDBCEntityConverters.toEntity(resultSet));
+
+                            Endpoint endpoint = new Endpoint();
+                            endpoint.setId(endpointTraffic.id().build());
+                            endpoint.setName(endpointTraffic.getName());
+                            endpoints.add(endpoint);
+                        }
+                        return endpoints;
+                    }, condition.toArray(new Object[0])));
         }
+        return results.stream().limit(limit).collect(toList());
     }
 
     @Override
-    public List<Process> listProcesses(String serviceInstanceId, Duration duration, boolean includeVirtual) throws IOException {
-        long lastPingStartTimeBucket = duration.getStartTimeBucket();
-        long lastPingEndTimeBucket = duration.getEndTimeBucket();
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>();
-        sql.append("select * from ").append(ProcessTraffic.INDEX_NAME);
-        appendProcessWhereQuery(sql, condition, null, serviceInstanceId, null, null, lastPingStartTimeBucket, lastPingEndTimeBucket, includeVirtual);
+    @SneakyThrows
+    public List<Process> listProcesses(String serviceId, ProfilingSupportStatus supportStatus, long lastPingStartTimeBucket, long lastPingEndTimeBucket) {
+        final var tables = tableHelper.getTablesForRead(
+            ProcessTraffic.INDEX_NAME,
+            lastPingStartTimeBucket,
+            lastPingEndTimeBucket
+        );
+        final var results = new ArrayList<Process>();
+
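+        // Collect matches from every table covering the ping-time range, then
+        // cap the merged result at metadataQueryMaxSize.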
+        for (String table : tables) {
+            final var sqlAndParameters = buildSQLForListProcesses(serviceId, supportStatus, lastPingStartTimeBucket, lastPingEndTimeBucket, table);
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::buildProcesses,
+                    sqlAndParameters.parameters()
+                )
+            );
+        }
+
+        return results
+            .stream()
+            .limit(metadataQueryMaxSize)
+            .collect(toList());
+    }
+
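+    /**
+     * Assemble the per-table SQL. The leading TABLE_COLUMN predicate scopes the
+     * query to ProcessTraffic rows inside the merged table; the remaining
+     * predicates are appended by appendProcessWhereQuery.
+     */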
+    protected SQLAndParameters buildSQLForListProcesses(
+        final String serviceId,
+        final ProfilingSupportStatus supportStatus,
+        final long lastPingStartTimeBucket,
+        final long lastPingEndTimeBucket,
+        final String table) {
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>();
+        sql.append("select * from ").append(table);
+        sql.append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+        parameters.add(ProcessTraffic.INDEX_NAME);
+        appendProcessWhereQuery(sql, parameters, serviceId, null, null, supportStatus, lastPingStartTimeBucket, lastPingEndTimeBucket, false);
         sql.append(" limit ").append(metadataQueryMaxSize);
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return buildProcesses(resultSet);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 
     @Override
-    public List<Process> listProcesses(String agentId) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(2);
-        sql.append("select * from ").append(ProcessTraffic.INDEX_NAME);
-        appendProcessWhereQuery(sql, condition, null, null, agentId, null, 0, 0, false);
+    @SneakyThrows
+    public List<Process> listProcesses(String serviceInstanceId, Duration duration, boolean includeVirtual) {
+        final var tables = tableHelper.getTablesForRead(
+            ProcessTraffic.INDEX_NAME,
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+        final var results = new ArrayList<Process>();
+
+        for (String table : tables) {
+            final var sqlAndParameters = buildSQLForListProcesses(serviceInstanceId, duration, includeVirtual, table);
+
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    this::buildProcesses,
+                    sqlAndParameters.parameters()
+                )
+            );
+        }
+
+        return results
+            .stream()
+            .limit(metadataQueryMaxSize)
+            .collect(toList());
+    }
+
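+    /**
+     * Variant keyed by service instance: derives the ping-time range from the
+     * duration and optionally includes virtual processes.
+     */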
+    protected SQLAndParameters buildSQLForListProcesses(String serviceInstanceId, Duration duration, boolean includeVirtual, String table) {
+        final var lastPingStartTimeBucket = duration.getStartTimeBucket();
+        final var lastPingEndTimeBucket = duration.getEndTimeBucket();
+        final var sql = new StringBuilder();
+        final var condition = new ArrayList<>();
+        sql.append("select * from ").append(table);
+        sql.append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+        condition.add(ProcessTraffic.INDEX_NAME);
+        appendProcessWhereQuery(sql, condition, null, serviceInstanceId, null, null, lastPingStartTimeBucket, lastPingEndTimeBucket, includeVirtual);
         sql.append(" limit ").append(metadataQueryMaxSize);
+        return new SQLAndParameters(sql.toString(), condition);
+    }
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return buildProcesses(resultSet);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @Override
+    @SneakyThrows
+    public List<Process> listProcesses(String agentId) {
+        final var tables = tableHelper.getTablesWithinTTL(ProcessTraffic.INDEX_NAME);
+        final var results = new ArrayList<Process>();
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(2);
+            sql.append("select * from ").append(table);
+            sql.append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+            condition.add(ProcessTraffic.INDEX_NAME);
+            appendProcessWhereQuery(sql, condition, null, null, agentId, null, 0, 0, false);
+            sql.append(" limit ").append(metadataQueryMaxSize);
+
+            results.addAll(
+                jdbcClient.executeQuery(sql.toString(), this::buildProcesses, condition.toArray(new Object[0]))
+            );
         }
+
+        return results
+            .stream()
+            .limit(metadataQueryMaxSize)
+            .collect(toList());
     }
 
     @Override
-    public long getProcessCount(String serviceId, ProfilingSupportStatus profilingSupportStatus, long lastPingStartTimeBucket, long lastPingEndTimeBucket) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(5);
-        sql.append("select count(1) total from ").append(ProcessTraffic.INDEX_NAME);
-        appendProcessWhereQuery(sql, condition, serviceId, null, null, profilingSupportStatus,
-            lastPingStartTimeBucket, lastPingEndTimeBucket, false);
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
+    @SneakyThrows
+    public long getProcessCount(String serviceId, ProfilingSupportStatus profilingSupportStatus, long lastPingStartTimeBucket, long lastPingEndTimeBucket) {
+        final var tables = tableHelper.getTablesForRead(
+            ProcessTraffic.INDEX_NAME,
+            lastPingStartTimeBucket,
+            lastPingEndTimeBucket
+        );
+        long total = 0;
+
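+        // Rows are disjoint across the rolled tables, so the per-table counts
+        // can simply be summed.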
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(5);
+            sql.append("select count(1) total from ").append(table);
+            sql.append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+            condition.add(ProcessTraffic.INDEX_NAME);
+
+            appendProcessWhereQuery(sql, condition, serviceId, null, null, profilingSupportStatus,
+                lastPingStartTimeBucket, lastPingEndTimeBucket, false);
+
+            total += jdbcClient.executeQuery(sql.toString(), resultSet -> {
                 if (!resultSet.next()) {
-                    return 0;
+                    return 0L;
                 }
                 return resultSet.getLong("total");
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+            }, condition.toArray(new Object[0]));
         }
+
+        return total;
     }
 
     @Override
-    public long getProcessCount(String instanceId) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(3);
-        sql.append("select count(1) total from ").append(ProcessTraffic.INDEX_NAME);
-        appendProcessWhereQuery(sql, condition, null, instanceId, null, null, 0, 0, false);
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
+    @SneakyThrows
+    public long getProcessCount(String instanceId) {
+        final var tables = tableHelper.getTablesWithinTTL(ProcessTraffic.INDEX_NAME);
+        long total = 0;
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(3);
+            sql.append("select count(1) total from ").append(table);
+            sql.append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+            condition.add(ProcessTraffic.INDEX_NAME);
+
+            appendProcessWhereQuery(sql, condition, null, instanceId, null, null, 0, 0, false);
+
+            total += jdbcClient.executeQuery(sql.toString(), resultSet -> {
                 if (!resultSet.next()) {
-                    return 0;
+                    return 0L;
                 }
                 return resultSet.getLong("total");
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+            }, condition.toArray(new Object[0]));
         }
+
+        return total;
     }
 
     private List<Service> buildServices(ResultSet resultSet) throws SQLException {
         List<Service> services = new ArrayList<>();
         while (resultSet.next()) {
-            String serviceName = resultSet.getString(ServiceTraffic.NAME);
+            final var serviceTraffic = new ServiceTraffic.Builder().storage2Entity(JDBCEntityConverters.toEntity(resultSet));
+
+            String serviceName = serviceTraffic.getName();
             Service service = new Service();
-            service.setId(resultSet.getString(ServiceTraffic.SERVICE_ID));
+            service.setId(serviceTraffic.getServiceId());
             service.setName(serviceName);
-            service.setShortName(resultSet.getString(ServiceTraffic.SHORT_NAME));
-            service.setGroup(resultSet.getString(ServiceTraffic.GROUP));
-            service.getLayers().add(Layer.valueOf(resultSet.getInt(ServiceTraffic.LAYER)).name());
+            service.setShortName(serviceTraffic.getShortName());
+            service.setGroup(serviceTraffic.getGroup());
+            service.getLayers().add(serviceTraffic.getLayer().name());
             services.add(service);
         }
         return services;
@@ -300,28 +450,28 @@ public class JDBCMetadataQueryDAO implements IMetadataQueryDAO {
         List<ServiceInstance> serviceInstances = new ArrayList<>();
 
         while (resultSet.next()) {
+            final var instanceTraffic =
+                new InstanceTraffic.Builder().storage2Entity(JDBCEntityConverters.toEntity(resultSet));
+
             ServiceInstance serviceInstance = new ServiceInstance();
-            serviceInstance.setId(resultSet.getString(H2TableInstaller.ID_COLUMN));
-            serviceInstance.setName(resultSet.getString(InstanceTraffic.NAME));
+            serviceInstance.setId(instanceTraffic.id().build());
+            serviceInstance.setName(instanceTraffic.getName());
             serviceInstance.setInstanceUUID(serviceInstance.getId());
 
-            String propertiesString = resultSet.getString(InstanceTraffic.PROPERTIES);
-            if (!Strings.isNullOrEmpty(propertiesString)) {
-                JsonObject properties = GSON.fromJson(propertiesString, JsonObject.class);
+            JsonObject properties = instanceTraffic.getProperties();
+            if (properties != null) {
                 for (Map.Entry<String, JsonElement> property : properties.entrySet()) {
                     String key = property.getKey();
                     String value = property.getValue().getAsString();
-                    if (key.equals(InstanceTraffic.PropertyUtil.LANGUAGE)) {
+                    if (key.equals(LANGUAGE)) {
                         serviceInstance.setLanguage(Language.value(value));
                     } else {
                         serviceInstance.getAttributes().add(new Attribute(key, value));
                     }
-
                 }
             } else {
                 serviceInstance.setLanguage(Language.UNKNOWN);
             }
-
             serviceInstances.add(serviceInstance);
         }
         return serviceInstances;
@@ -331,12 +481,8 @@ public class JDBCMetadataQueryDAO implements IMetadataQueryDAO {
                                          String agentId, final ProfilingSupportStatus profilingSupportStatus,
                                          final long lastPingStartTimeBucket, final long lastPingEndTimeBucket,
                                          boolean includeVirtual) {
-        if (StringUtil.isNotEmpty(serviceId) || StringUtil.isNotEmpty(instanceId) || StringUtil.isNotEmpty(agentId)) {
-            sql.append(" where ");
-        }
-
         if (StringUtil.isNotEmpty(serviceId)) {
-            sql.append(ProcessTraffic.SERVICE_ID).append("=?");
+            sql.append(" and ").append(ProcessTraffic.SERVICE_ID).append("=?");
             condition.add(serviceId);
         }
         if (StringUtil.isNotEmpty(instanceId)) {
@@ -377,56 +523,65 @@ public class JDBCMetadataQueryDAO implements IMetadataQueryDAO {
     }
 
     @Override
-    public Process getProcess(String processId) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(5);
-        sql.append("select * from ").append(ProcessTraffic.INDEX_NAME).append(" where ");
-        sql.append(H2TableInstaller.ID_COLUMN).append(" = ?");
-        condition.add(processId);
-        sql.append(" limit ").append(metadataQueryMaxSize);
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            ResultSet resultSet = jdbcClient.executeQuery(
-                    connection, sql.toString(), condition.toArray(new Object[0]));
-            final List<Process> processes = buildProcesses(resultSet);
-            return processes.size() > 0 ? processes.get(0) : null;
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @SneakyThrows
+    public Process getProcess(String processId) {
+        final var tables = tableHelper.getTablesWithinTTL(ProcessTraffic.INDEX_NAME);
+
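+        // A process id matches at most one row across the rolled tables, so
+        // stop at the first hit.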
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(5);
+            sql.append("select * from ").append(table).append(" where ");
+            sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+            condition.add(ProcessTraffic.INDEX_NAME);
+            sql.append(" and ").append(JDBCTableInstaller.ID_COLUMN).append(" = ?");
+            condition.add(processId);
+            sql.append(" limit ").append(metadataQueryMaxSize);
+
+            final var result = jdbcClient.executeQuery(
+                sql.toString(),
+                resultSet -> {
+                    final List<Process> processes = buildProcesses(resultSet);
+                    return processes.size() > 0 ? processes.get(0) : null;
+                },
+                condition.toArray(new Object[0]));
+            if (result != null) {
+                return result;
+            }
         }
+        return null;
     }
 
     private List<Process> buildProcesses(ResultSet resultSet) throws SQLException {
         List<Process> processes = new ArrayList<>();
         while (resultSet.next()) {
-            final Process process = new Process();
-            process.setId(resultSet.getString(H2TableInstaller.ID_COLUMN));
-            process.setName(resultSet.getString(ProcessTraffic.NAME));
-            final String serviceId = resultSet.getString(ProcessTraffic.SERVICE_ID);
+            final var processTraffic = new ProcessTraffic.Builder().storage2Entity(JDBCEntityConverters.toEntity(resultSet));
+
+            Process process = new Process();
+            process.setId(processTraffic.id().build());
+            process.setName(processTraffic.getName());
+            final String serviceId = processTraffic.getServiceId();
             process.setServiceId(serviceId);
-            final IDManager.ServiceID.ServiceIDDefinition serviceIDDefinition = IDManager.ServiceID.analysisId(serviceId);
-            process.setServiceName(serviceIDDefinition.getName());
-            final String instanceId = resultSet.getString(ProcessTraffic.INSTANCE_ID);
+            process.setServiceName(IDManager.ServiceID.analysisId(serviceId).getName());
+            final String instanceId = processTraffic.getInstanceId();
             process.setInstanceId(instanceId);
-            final IDManager.ServiceInstanceID.InstanceIDDefinition instanceIDDefinition = IDManager.ServiceInstanceID.analysisId(instanceId);
-            process.setInstanceName(instanceIDDefinition.getName());
-            process.setAgentId(resultSet.getString(ProcessTraffic.AGENT_ID));
-            process.setDetectType(ProcessDetectType.valueOf(resultSet.getInt(ProcessTraffic.DETECT_TYPE)).name());
-            process.setProfilingSupportStatus(ProfilingSupportStatus.valueOf(resultSet.getInt(ProcessTraffic.PROFILING_SUPPORT_STATUS)).name());
-            String propertiesString = resultSet.getString(ProcessTraffic.PROPERTIES);
-            if (!Strings.isNullOrEmpty(propertiesString)) {
-                JsonObject properties = GSON.fromJson(propertiesString, JsonObject.class);
+            process.setInstanceName(IDManager.ServiceInstanceID.analysisId(instanceId).getName());
+            process.setAgentId(processTraffic.getAgentId());
+            process.setDetectType(ProcessDetectType.valueOf(processTraffic.getDetectType()).name());
+            process.setProfilingSupportStatus(ProfilingSupportStatus.valueOf(processTraffic.getProfilingSupportStatus()).name());
+
+            JsonObject properties = processTraffic.getProperties();
+            if (properties != null) {
                 for (Map.Entry<String, JsonElement> property : properties.entrySet()) {
                     String key = property.getKey();
                     String value = property.getValue().getAsString();
                     process.getAttributes().add(new Attribute(key, value));
                 }
             }
-            final String labelJsonString = resultSet.getString(ProcessTraffic.LABELS_JSON);
-            if (!Strings.isNullOrEmpty(labelJsonString)) {
-                List<String> labels = GSON.<List<String>>fromJson(labelJsonString, ArrayList.class);
+            final String labelsJson = processTraffic.getLabelsJson();
+            if (StringUtils.isNotEmpty(labelsJson)) {
+                final List<String> labels = GSON.<List<String>>fromJson(labelsJson, ArrayList.class);
                 process.getLabels().addAll(labels);
             }
-
             processes.add(process);
         }
         return processes;
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetricsDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetricsDAO.java
index 082bf2d913..db036e86c2 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetricsDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetricsDAO.java
@@ -18,10 +18,7 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
+import lombok.RequiredArgsConstructor;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
 import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
 import org.apache.skywalking.oap.server.core.storage.SessionCacheCallback;
@@ -29,20 +26,26 @@ import org.apache.skywalking.oap.server.core.storage.StorageData;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.core.storage.type.HashMapConverter;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
-import lombok.RequiredArgsConstructor;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import static java.util.stream.Collectors.toList;
 
 @RequiredArgsConstructor
 public class JDBCMetricsDAO extends JDBCSQLExecutor implements IMetricsDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
     private final StorageBuilder<Metrics> storageBuilder;
 
     @Override
-    public List<Metrics> multiGet(Model model, List<Metrics> metrics) throws IOException {
-        String[] ids = metrics.stream().map(m -> m.id().build()).collect(Collectors.toList()).toArray(new String[] {});
-        List<StorageData> storageDataList = getByIDs(jdbcClient, model.getName(), ids, storageBuilder);
-        List<Metrics> result = new ArrayList<>(storageDataList.size());
+    public List<Metrics> multiGet(Model model, List<Metrics> metrics) throws Exception {
+        final var ids = metrics.stream().map(m -> TableHelper.generateId(model, m.id().build())).collect(toList());
+        final var storageDataList = getByIDs(jdbcClient, model.getName(), ids, storageBuilder);
+        final var result = new ArrayList<Metrics>(storageDataList.size());
         for (StorageData storageData : storageDataList) {
             result.add((Metrics) storageData);
         }
@@ -51,11 +54,11 @@ public class JDBCMetricsDAO extends JDBCSQLExecutor implements IMetricsDAO {
 
     @Override
     public SQLExecutor prepareBatchInsert(Model model, Metrics metrics, SessionCacheCallback callback) throws IOException {
-        return getInsertExecutor(model.getName(), metrics, storageBuilder, new HashMapConverter.ToStorage(), callback);
+        return getInsertExecutor(model, metrics, metrics.getTimeBucket(), storageBuilder, new HashMapConverter.ToStorage(), callback);
     }
 
     @Override
-    public SQLExecutor prepareBatchUpdate(Model model, Metrics metrics, SessionCacheCallback callback) throws IOException {
-        return getUpdateExecutor(model.getName(), metrics, storageBuilder, callback);
+    public SQLExecutor prepareBatchUpdate(Model model, Metrics metrics, SessionCacheCallback callback) {
+        return getUpdateExecutor(model, metrics, metrics.getTimeBucket(), storageBuilder, callback);
     }
 }
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetricsQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetricsQueryDAO.java
index 303c4c304a..19edf4bb2d 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetricsQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCMetricsQueryDAO.java
@@ -18,45 +18,52 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
-import org.apache.skywalking.oap.server.core.query.PointOfTime;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.query.input.MetricsCondition;
 import org.apache.skywalking.oap.server.core.query.sql.Function;
 import org.apache.skywalking.oap.server.core.query.type.HeatMap;
-import org.apache.skywalking.oap.server.core.query.type.IntValues;
 import org.apache.skywalking.oap.server.core.query.type.KVInt;
 import org.apache.skywalking.oap.server.core.query.type.MetricsValues;
 import org.apache.skywalking.oap.server.core.storage.annotation.ValueColumnMetadata;
 import org.apache.skywalking.oap.server.core.storage.query.IMetricsQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
-import lombok.RequiredArgsConstructor;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.stream.Collectors;
 
 @RequiredArgsConstructor
 public class JDBCMetricsQueryDAO extends JDBCSQLExecutor implements IMetricsQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
+    @SneakyThrows
     public long readMetricsValue(final MetricsCondition condition,
                                 String valueColumnName,
-                                final Duration duration) throws IOException {
-        final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
-        List<String> ids = new ArrayList<>(pointOfTimes.size());
-        final String entityId = condition.getEntity().buildId();
-        pointOfTimes.forEach(pointOfTime -> {
-            ids.add(pointOfTime.id(entityId));
-        });
-        int defaultValue = ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName());
-        final Function function = ValueColumnMetadata.INSTANCE.getValueFunction(condition.getName());
+                                final Duration duration) {
+        final var tables = tableHelper.getTablesForRead(
+            condition.getName(),
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+
+        final var pointOfTimes = duration.assembleDurationPoints();
+        final var entityId = condition.getEntity().buildId();
+        final var ids =
+            pointOfTimes
+                .stream()
+                .map(pointOfTime -> TableHelper.generateId(condition.getName(), pointOfTime.id(entityId)))
+                .collect(Collectors.toList());
+        final var defaultValue = ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName());
+        final var function = ValueColumnMetadata.INSTANCE.getValueFunction(condition.getName());
         if (function == Function.Latest) {
             return readMetricsValues(condition, valueColumnName, duration).getValues().latestValue(defaultValue);
         }
@@ -68,38 +75,37 @@ public class JDBCMetricsQueryDAO extends JDBCSQLExecutor implements IMetricsQuer
             default:
                 op = "sum";
         }
-        StringBuilder sql = buildMetricsValueSql(op, valueColumnName, condition.getName());
-        List<Object> parameters = new ArrayList();
-        if (entityId != null) {
-            sql.append(Metrics.ENTITY_ID + " = ? and ");
-            parameters.add(entityId);
-        }
-        sql.append("id in (");
-        for (int i = 0; i < ids.size(); i++) {
-            if (i == 0) {
-                sql.append("?");
-            } else {
-                sql.append(",?");
+
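+        // Run the aggregation against every table in the range, keeping one
+        // partial result per table; the partials are merged after the loop.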
+        final var results = new ArrayList<Long>();
+        for (String table : tables) {
+            final var sql = buildMetricsValueSql(op, valueColumnName, table);
+            final var parameters = new ArrayList<>();
+            if (entityId != null) {
+                sql.append(Metrics.ENTITY_ID + " = ? and ");
+                parameters.add(entityId);
             }
-            parameters.add(ids.get(i));
-        }
-        sql.append(")");
-        sql.append(" group by " + Metrics.ENTITY_ID);
+            sql.append("id in ");
+            sql.append(ids.stream().map(it -> "?").collect(Collectors.joining(", ", "(", ")")));
+            parameters.addAll(ids);
+            sql.append(" and ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            parameters.add(condition.getName());
+            sql.append(" group by " + Metrics.ENTITY_ID);
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection,
+            results.add(jdbcClient.executeQuery(
                 sql.toString(),
+                resultSet -> {
+                    if (resultSet.next()) {
+                        return resultSet.getLong("result");
+                    }
+                    return (long) defaultValue;
+                },
                 parameters.toArray(new Object[0])
-            )) {
-                if (resultSet.next()) {
-                    return resultSet.getLong("result");
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+            ));
         }
-        return defaultValue;
+        if (op.equals("avg")) {
+            return results.stream().mapToLong(it -> it).sum() / results.size();
+        }
+        return results.stream().mapToLong(it -> it).sum();
     }
 
     protected StringBuilder buildMetricsValueSql(String op, String valueColumnName, String conditionName) {
@@ -108,48 +114,51 @@ public class JDBCMetricsQueryDAO extends JDBCSQLExecutor implements IMetricsQuer
     }
 
     @Override
+    @SneakyThrows
     public MetricsValues readMetricsValues(final MetricsCondition condition,
                                            final String valueColumnName,
-                                           final Duration duration) throws IOException {
-        final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
-        List<String> ids = new ArrayList<>(pointOfTimes.size());
-        final String entityId = condition.getEntity().buildId();
-        pointOfTimes.forEach(pointOfTime -> {
-            ids.add(pointOfTime.id(entityId));
-        });
-
-        StringBuilder sql = new StringBuilder(
-            "select id, " + valueColumnName + " from " + condition.getName() + " where id in (");
-        List<Object> parameters = new ArrayList();
-        for (int i = 0; i < ids.size(); i++) {
-            if (i == 0) {
-                sql.append("?");
-            } else {
-                sql.append(",?");
-            }
-            parameters.add(ids.get(i));
-        }
-        sql.append(")");
+                                           final Duration duration) {
+        final var metricsValues = new MetricsValues();
+        // Labels are null here because readMetricsValues takes no label parameter.
+        final var intValues = metricsValues.getValues();
+
+        final var tables = tableHelper.getTablesForRead(
+            condition.getName(),
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
 
-        buildShardingCondition(sql, parameters, entityId);
+        final var pointOfTimes = duration.assembleDurationPoints();
+        final var entityId = condition.getEntity().buildId();
+        final var ids =
+            pointOfTimes
+                .stream()
+                .map(pointOfTime -> TableHelper.generateId(condition.getName(), pointOfTime.id(entityId)))
+                .collect(Collectors.toList());
 
-        MetricsValues metricsValues = new MetricsValues();
-        // Label is null, because in readMetricsValues, no label parameter.
-        final IntValues intValues = metricsValues.getValues();
-
-        try (Connection connection = jdbcClient.getConnection()) {
-
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), parameters.toArray(new Object[0]))) {
-                while (resultSet.next()) {
-                    KVInt kv = new KVInt();
-                    kv.setId(resultSet.getString("id"));
-                    kv.setValue(resultSet.getLong(valueColumnName));
-                    intValues.addKVInt(kv);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
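+        // Fetch the datapoints table by table; the generated per-day ids serve
+        // as both the IN-list placeholders and the bind parameters.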
+        for (final var table : tables) {
+            final var sql = new StringBuilder("select id, " + valueColumnName + " from " + table)
+                .append(" where id in ")
+                .append(
+                    ids.stream()
+                       .map(it -> "?")
+                       .collect(Collectors.joining(", ", "(", ")"))
+                );
+
+            buildShardingCondition(sql, ids, entityId);
+
+            jdbcClient.executeQuery(
+                sql.toString(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        final var kv = new KVInt();
+                        kv.setId(resultSet.getString("id"));
+                        kv.setValue(resultSet.getLong(valueColumnName));
+                        intValues.addKVInt(kv);
+                    }
+                    return null;
+                },
+                ids.toArray(new Object[0]));
         }
 
         metricsValues.setValues(
@@ -159,49 +168,52 @@ public class JDBCMetricsQueryDAO extends JDBCSQLExecutor implements IMetricsQuer
     }
 
     @Override
+    @SneakyThrows
     public List<MetricsValues> readLabeledMetricsValues(final MetricsCondition condition,
                                                         final String valueColumnName,
                                                         final List<String> labels,
-                                                        final Duration duration) throws IOException {
-        final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
-        List<String> ids = new ArrayList<>(pointOfTimes.size());
-        final String entityId = condition.getEntity().buildId();
-        pointOfTimes.forEach(pointOfTime -> {
-            ids.add(pointOfTime.id(entityId));
-        });
-
-        StringBuilder sql = new StringBuilder(
-            "select id, " + valueColumnName + " from " + condition.getName() + " where id in (");
-
-        List<Object> parameters = new ArrayList();
-        for (int i = 0; i < ids.size(); i++) {
-            if (i == 0) {
-                sql.append("?");
-            } else {
-                sql.append(",?");
-            }
-            parameters.add(ids.get(i));
-        }
-        sql.append(")");
+                                                        final Duration duration) {
+        final var idMap = new HashMap<String, DataTable>();
+        final var tables = tableHelper.getTablesForRead(
+            condition.getName(),
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
 
-        buildShardingCondition(sql, parameters, entityId);
+        final var pointOfTimes = duration.assembleDurationPoints();
+        final var entityId = condition.getEntity().buildId();
+        final var ids =
+            pointOfTimes
+                .stream()
+                .map(pointOfTime -> TableHelper.generateId(condition.getName(), pointOfTime.id(entityId)))
+                .collect(Collectors.toList());
 
-        Map<String, DataTable> idMap = new HashMap<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), parameters.toArray(new Object[0]))) {
-                while (resultSet.next()) {
-                    String id = resultSet.getString("id");
+        for (final var table : tables) {
+            final var sql = new StringBuilder("select id, " + valueColumnName + " from " + table)
+                .append(" where id in ")
+                .append(
+                    ids.stream().map(it -> "?")
+                       .collect(Collectors.joining(", ", "(", ")"))
+                );
 
-                    DataTable multipleValues = new DataTable(5);
-                    multipleValues.toObject(resultSet.getString(valueColumnName));
+            buildShardingCondition(sql, ids, entityId);
 
-                   idMap.put(id, multipleValues);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+            jdbcClient.executeQuery(
+                sql.toString(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        String id = resultSet.getString("id");
+
+                        DataTable multipleValues = new DataTable(5);
+                        multipleValues.toObject(resultSet.getString(valueColumnName));
+
+                        idMap.put(id, multipleValues);
+                    }
+                    return null;
+                },
+                ids.toArray(new Object[0]));
         }
+
         return Util.sortValues(
             Util.composeLabelValue(condition, labels, ids, idMap),
             ids,
@@ -210,52 +222,55 @@ public class JDBCMetricsQueryDAO extends JDBCSQLExecutor implements IMetricsQuer
     }
 
     @Override
+    @SneakyThrows
     public HeatMap readHeatMap(final MetricsCondition condition,
                                final String valueColumnName,
-                               final Duration duration) throws IOException {
-        final List<PointOfTime> pointOfTimes = duration.assembleDurationPoints();
-        List<String> ids = new ArrayList<>(pointOfTimes.size());
-        final String entityId = condition.getEntity().buildId();
-        pointOfTimes.forEach(pointOfTime -> {
-            ids.add(pointOfTime.id(entityId));
-        });
-
-        StringBuilder sql = new StringBuilder(
-            "select id, " + valueColumnName + " dataset, id from " + condition.getName() + " where id in (");
-        List<Object> parameters = new ArrayList();
-        for (int i = 0; i < ids.size(); i++) {
-            if (i == 0) {
-                sql.append("?");
-            } else {
-                sql.append(",?");
-            }
-            parameters.add(ids.get(i));
-        }
-        sql.append(")");
+                               final Duration duration) {
+        final var tables = tableHelper.getTablesForRead(
+            condition.getName(),
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+        final var heatMap = new HeatMap();
 
-        buildShardingCondition(sql, parameters, entityId);
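+        // Each rolled table contributes its columns to the shared heatMap, and
+        // missing columns are back-filled with the default value per table.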
+        for (String table : tables) {
+            final var pointOfTimes = duration.assembleDurationPoints();
+            final var entityId = condition.getEntity().buildId();
+            final var ids =
+                pointOfTimes
+                    .stream()
+                    .map(pointOfTime -> TableHelper.generateId(condition.getName(), pointOfTime.id(entityId)))
+                    .collect(Collectors.toList());
 
-        final int defaultValue = ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName());
+            final var sql = new StringBuilder("select id, " + valueColumnName + " dataset, id from " + table)
+                .append(" where id in ")
+                .append(
+                    ids.stream()
+                       .map(it -> "?")
+                       .collect(Collectors.joining(", ", "(", ")"))
+                );
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            HeatMap heatMap = new HeatMap();
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), parameters.toArray(new Object[0]))) {
+            buildShardingCondition(sql, ids, entityId);
 
-                while (resultSet.next()) {
-                    heatMap.buildColumn(
-                        resultSet.getString("id"), resultSet.getString("dataset"), defaultValue);
-                }
-            }
+            final int defaultValue = ValueColumnMetadata.INSTANCE.getDefaultValue(condition.getName());
 
-            heatMap.fixMissingColumns(ids, defaultValue);
+            jdbcClient.executeQuery(
+                sql.toString(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        heatMap.buildColumn(
+                            resultSet.getString("id"), resultSet.getString("dataset"), defaultValue);
+                    }
+                    heatMap.fixMissingColumns(ids, defaultValue);
 
-            return heatMap;
-        } catch (SQLException e) {
-            throw new IOException(e);
+                    return null;
+                },
+                ids.toArray(new Object[0]));
         }
+
+        return heatMap;
     }
 
-    protected void buildShardingCondition(StringBuilder sql, List<Object> parameters, String entityId) {
+    protected void buildShardingCondition(StringBuilder sql, List<String> parameters, String entityId) {
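+        // No-op by default; sharding-aware subclasses may append extra
+        // predicates and bind values here.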
     }
 }
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCNetworkAddressAliasDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCNetworkAddressAliasDAO.java
index 63e638ff5b..e12be5eb25 100755
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCNetworkAddressAliasDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCNetworkAddressAliasDAO.java
@@ -18,50 +18,60 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.analysis.manual.networkalias.NetworkAddressAlias;
 import org.apache.skywalking.oap.server.core.storage.cache.INetworkAddressAliasDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.library.module.ModuleManager;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.util.ArrayList;
+import java.util.List;
 
 @Slf4j
-@RequiredArgsConstructor
 public class JDBCNetworkAddressAliasDAO extends JDBCSQLExecutor implements INetworkAddressAliasDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
+
+    public JDBCNetworkAddressAliasDAO(JDBCClient jdbcClient, ModuleManager moduleManager) {
+        this.jdbcClient = jdbcClient;
+        this.tableHelper = new TableHelper(moduleManager, jdbcClient);
+    }
 
     @Override
+    @SneakyThrows
     public List<NetworkAddressAlias> loadLastUpdate(long lastUpdateTime) {
-        List<NetworkAddressAlias> networkAddressAliases = new ArrayList<>();
+        final var tables = tableHelper.getTablesWithinTTL(NetworkAddressAlias.INDEX_NAME);
+        final var results = new ArrayList<NetworkAddressAlias>();
 
-        try {
-            StringBuilder sql = new StringBuilder("select * from ");
-            sql.append(NetworkAddressAlias.INDEX_NAME);
-            sql.append(" where ").append(NetworkAddressAlias.LAST_UPDATE_TIME_BUCKET).append(">?");
+        for (final var table : tables) {
+            final var sql = new StringBuilder()
+                .append("select * from ").append(table)
+                .append(" where ")
+                .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ")
+                .append(" and ").append(NetworkAddressAlias.LAST_UPDATE_TIME_BUCKET).append(">?");
 
-            try (Connection connection = jdbcClient.getConnection()) {
-                try (ResultSet resultSet = jdbcClient.executeQuery(connection, sql.toString(), lastUpdateTime)) {
-                    NetworkAddressAlias networkAddressAlias;
-                    do {
-                        networkAddressAlias = (NetworkAddressAlias) toStorageData(
-                            resultSet, NetworkAddressAlias.INDEX_NAME, new NetworkAddressAlias.Builder());
-                        if (networkAddressAlias != null) {
-                            networkAddressAliases.add(networkAddressAlias);
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sql.toString(),
+                    resultSet -> {
+                        List<NetworkAddressAlias> networkAddressAliases = new ArrayList<>();
+                        NetworkAddressAlias networkAddressAlias;
+                        do {
+                            networkAddressAlias = (NetworkAddressAlias) toStorageData(
+                                resultSet, NetworkAddressAlias.INDEX_NAME, new NetworkAddressAlias.Builder());
+                            if (networkAddressAlias != null) {
+                                networkAddressAliases.add(networkAddressAlias);
+                            }
                         }
-                    }
-                    while (networkAddressAlias != null);
-                }
-            } catch (SQLException e) {
-                throw new IOException(e);
-            }
-        } catch (Throwable t) {
-            log.error(t.getMessage(), t);
+                        while (networkAddressAlias != null);
+                        return networkAddressAliases;
+                    },
+                    NetworkAddressAlias.INDEX_NAME, lastUpdateTime)
+            );
         }
-        return networkAddressAliases;
+        return results;
     }
 }
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCNoneStreamDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCNoneStreamDAO.java
index 7a87059e08..cc6f4148f2 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCNoneStreamDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCNoneStreamDAO.java
@@ -18,30 +18,31 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
+import lombok.RequiredArgsConstructor;
 import org.apache.skywalking.oap.server.core.analysis.config.NoneStream;
 import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.core.storage.type.HashMapConverter;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
-import lombok.RequiredArgsConstructor;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
 
 /**
  * Synchronous none-stream storage implementation over JDBC.
  */
 @RequiredArgsConstructor
 public class JDBCNoneStreamDAO extends JDBCSQLExecutor implements INoneStreamDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
     private final StorageBuilder<NoneStream> storageBuilder;
 
     @Override
     public void insert(Model model, NoneStream noneStream) throws IOException {
         try (Connection connection = jdbcClient.getConnection()) {
-            SQLExecutor insertExecutor = getInsertExecutor(model.getName(), noneStream, storageBuilder, new HashMapConverter.ToStorage(), null);
+            SQLExecutor insertExecutor = getInsertExecutor(model, noneStream, 0, storageBuilder, new HashMapConverter.ToStorage(), null);
             insertExecutor.invoke(connection);
         } catch (IOException | SQLException e) {
             throw new IOException(e.getMessage(), e);
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileTaskLogQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileTaskLogQueryDAO.java
index 290c135598..6866ccd408 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileTaskLogQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileTaskLogQueryDAO.java
@@ -18,43 +18,62 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.profiling.trace.ProfileTaskLogRecord;
 import org.apache.skywalking.oap.server.core.query.type.ProfileTaskLog;
 import org.apache.skywalking.oap.server.core.query.type.ProfileTaskLogOperationType;
 import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileTaskLogQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.JDBCClientException;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
-import lombok.RequiredArgsConstructor;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
 
 @RequiredArgsConstructor
 public class JDBCProfileTaskLogQueryDAO implements IProfileTaskLogQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
-    public List<ProfileTaskLog> getTaskLogList() throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        final ArrayList<Object> condition = new ArrayList<>(1);
-        sql.append("select * from ").append(ProfileTaskLogRecord.INDEX_NAME).append(" where 1=1 ");
+    @SneakyThrows
+    public List<ProfileTaskLog> getTaskLogList() {
+        final var tables = tableHelper.getTablesWithinTTL(ProfileTaskLogRecord.INDEX_NAME);
+        final var results = new ArrayList<ProfileTaskLog>();
 
-        sql.append("ORDER BY ").append(ProfileTaskLogRecord.OPERATION_TIME).append(" DESC ");
+        for (final var table : tables) {
+            final var sqlAndParameters = buildSQL(table);
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(connection, sql.toString(), condition.toArray(new Object[0]))) {
-                final List<ProfileTaskLog> tasks = new ArrayList<>();
-                while (resultSet.next()) {
-                    tasks.add(parseLog(resultSet));
-                }
-                return tasks;
-            }
-        } catch (SQLException | JDBCClientException e) {
-            throw new IOException(e);
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sqlAndParameters.sql(),
+                    resultSet -> {
+                        final List<ProfileTaskLog> tasks = new ArrayList<>();
+                        while (resultSet.next()) {
+                            tasks.add(parseLog(resultSet));
+                        }
+                        return tasks;
+                    },
+                    sqlAndParameters.parameters())
+            );
         }
+
+        return results;
+    }
+
+    protected SQLAndParameters buildSQL(String table) {
+        final var sql = new StringBuilder();
+        final var parameters = new ArrayList<>(2);
+        sql.append("select * from ").append(table)
+           .append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+        parameters.add(ProfileTaskLogRecord.INDEX_NAME);
+
+        sql.append(" ORDER BY ").append(ProfileTaskLogRecord.OPERATION_TIME).append(" DESC ");
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 
     private ProfileTaskLog parseLog(ResultSet data) throws SQLException {
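
The rewritten query path above fans out one query per physical day table and filters every statement on JDBCTableInstaller.TABLE_COLUMN, because several logical models now share one physical table. A minimal sketch of that fan-out read in plain JDBC follows; the discriminator column name (table_name) and the table list are assumptions mirroring the pattern, not SkyWalking's exact API.

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.List;

    public final class FanOutQuery {
        // Query every rolled day table and merge the rows; each statement must
        // filter on the discriminator column because several logical models
        // share one physical table.
        static List<String> readAll(Connection conn, List<String> tables,
                                    String logicalModel) throws SQLException {
            final List<String> rows = new ArrayList<>();
            for (String table : tables) {
                final String sql = "select id from " + table + " where table_name = ?";
                try (PreparedStatement ps = conn.prepareStatement(sql)) {
                    ps.setString(1, logicalModel);
                    try (ResultSet rs = ps.executeQuery()) {
                        while (rs.next()) {
                            rows.add(rs.getString("id"));
                        }
                    }
                }
            }
            return rows;
        }
    }
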
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileTaskQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileTaskQueryDAO.java
index 7b3a2cd1d8..97d5133e1c 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileTaskQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileTaskQueryDAO.java
@@ -18,93 +18,125 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
+import lombok.SneakyThrows;
+import org.apache.skywalking.oap.server.core.profiling.trace.ProfileTaskRecord;
+import org.apache.skywalking.oap.server.core.query.type.ProfileTask;
+import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileTaskQueryDAO;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.library.module.ModuleManager;
+import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.LinkedList;
 import java.util.List;
-import org.apache.skywalking.oap.server.library.util.StringUtil;
-import lombok.RequiredArgsConstructor;
-import org.apache.skywalking.oap.server.core.profiling.trace.ProfileTaskRecord;
-import org.apache.skywalking.oap.server.core.query.type.ProfileTask;
-import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileTaskQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.JDBCClientException;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
 
-@RequiredArgsConstructor
+import static java.util.stream.Collectors.toList;
+
 public class JDBCProfileTaskQueryDAO implements IProfileTaskQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
+
+    public JDBCProfileTaskQueryDAO(JDBCClient jdbcClient, ModuleManager moduleManager) {
+        this.jdbcClient = jdbcClient;
+        this.tableHelper = new TableHelper(moduleManager, jdbcClient);
+    }
 
     @Override
+    @SneakyThrows
     public List<ProfileTask> getTaskList(String serviceId, String endpointName, Long startTimeBucket,
-                                         Long endTimeBucket, Integer limit) throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        final ArrayList<Object> condition = new ArrayList<>(4);
-        sql.append("select * from ").append(ProfileTaskRecord.INDEX_NAME).append(" where 1=1 ");
-
-        if (startTimeBucket != null) {
-            sql.append(" and ").append(ProfileTaskRecord.TIME_BUCKET).append(" >= ? ");
-            condition.add(startTimeBucket);
-        }
+                                         Long endTimeBucket, Integer limit) {
+        final var results = new ArrayList<ProfileTask>();
+        final var tables = startTimeBucket == null || endTimeBucket == null ?
+            tableHelper.getTablesWithinTTL(ProfileTaskRecord.INDEX_NAME) :
+            tableHelper.getTablesForRead(ProfileTaskRecord.INDEX_NAME, startTimeBucket, endTimeBucket);
+        for (final var table : tables) {
+            final var condition = new ArrayList<>(4);
+            final var sql = new StringBuilder()
+                .append("select * from ").append(table)
+                .append(" where ").append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            condition.add(ProfileTaskRecord.INDEX_NAME);
 
-        if (endTimeBucket != null) {
-            sql.append(" and ").append(ProfileTaskRecord.TIME_BUCKET).append(" <= ? ");
-            condition.add(endTimeBucket);
-        }
+            if (startTimeBucket != null) {
+                sql.append(" and ").append(ProfileTaskRecord.TIME_BUCKET).append(" >= ? ");
+                condition.add(startTimeBucket);
+            }
 
-        if (StringUtil.isNotEmpty(serviceId)) {
-            sql.append(" and ").append(ProfileTaskRecord.SERVICE_ID).append("=? ");
-            condition.add(serviceId);
-        }
+            if (endTimeBucket != null) {
+                sql.append(" and ").append(ProfileTaskRecord.TIME_BUCKET).append(" <= ? ");
+                condition.add(endTimeBucket);
+            }
 
-        if (StringUtil.isNotEmpty(endpointName)) {
-            sql.append(" and ").append(ProfileTaskRecord.ENDPOINT_NAME).append("=?");
-            condition.add(endpointName);
-        }
+            if (StringUtil.isNotEmpty(serviceId)) {
+                sql.append(" and ").append(ProfileTaskRecord.SERVICE_ID).append("=? ");
+                condition.add(serviceId);
+            }
 
-        sql.append(" ORDER BY ").append(ProfileTaskRecord.START_TIME).append(" DESC ");
+            if (StringUtil.isNotEmpty(endpointName)) {
+                sql.append(" and ").append(ProfileTaskRecord.ENDPOINT_NAME).append("=?");
+                condition.add(endpointName);
+            }
 
-        if (limit != null) {
-            sql.append(" LIMIT ").append(limit);
-        }
+            sql.append(" ORDER BY ").append(ProfileTaskRecord.START_TIME).append(" DESC ");
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                final LinkedList<ProfileTask> tasks = new LinkedList<>();
-                while (resultSet.next()) {
-                    tasks.add(parseTask(resultSet));
-                }
-                return tasks;
+            if (limit != null) {
+                sql.append(" LIMIT ").append(limit);
             }
-        } catch (SQLException e) {
-            throw new IOException(e);
+
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sql.toString(),
+                    resultSet -> {
+                        final var tasks = new ArrayList<ProfileTask>();
+                        while (resultSet.next()) {
+                            tasks.add(parseTask(resultSet));
+                        }
+                        return tasks;
+                    },
+                    condition.toArray(new Object[0]))
+            );
         }
+        return limit == null ?
+            results :
+            results
+                .stream()
+                .limit(limit)
+                .collect(toList());
     }
 
     @Override
-    public ProfileTask getById(String id) throws IOException {
+    @SneakyThrows
+    public ProfileTask getById(String id) {
         if (StringUtil.isEmpty(id)) {
             return null;
         }
 
-        final StringBuilder sql = new StringBuilder();
-        final ArrayList<Object> condition = new ArrayList<>(1);
-        sql.append("select * from ").append(ProfileTaskRecord.INDEX_NAME)
-            .append(" where " + ProfileTaskRecord.TASK_ID + "=? LIMIT 1");
-        condition.add(id);
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                if (resultSet.next()) {
-                    return parseTask(resultSet);
-                }
+        final var tables = tableHelper.getTablesWithinTTL(ProfileTaskRecord.INDEX_NAME);
+        for (String table : tables) {
+            final StringBuilder sql = new StringBuilder();
+            final ArrayList<Object> condition = new ArrayList<>(1);
+            sql.append("select * from ").append(table)
+               .append(" where ")
+               .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ")
+               .append(" and ")
+               .append(ProfileTaskRecord.TASK_ID + "=? LIMIT 1");
+            condition.add(ProfileTaskRecord.INDEX_NAME);
+            condition.add(id);
+
+            final var r = jdbcClient.executeQuery(
+                sql.toString(),
+                resultSet -> {
+                    if (resultSet.next()) {
+                        return parseTask(resultSet);
+                    }
+                    return null;
+                },
+                condition.toArray(new Object[0]));
+            if (r != null) {
+                return r;
             }
-        } catch (SQLException | JDBCClientException e) {
-            throw new IOException(e);
         }
         return null;
     }
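
Because each day table applies LIMIT independently, the merged list in getTaskList above can hold up to tables x limit rows, so the method trims the union once more with stream().limit(limit). A small self-contained sketch of this cap-after-union step:

    import java.util.List;
    import static java.util.stream.Collectors.toList;

    public final class UnionLimit {
        // Concatenate per-table result lists and enforce the global cap once.
        static List<Integer> firstN(List<List<Integer>> perTable, int limit) {
            return perTable.stream()
                           .flatMap(List::stream)
                           .limit(limit)
                           .collect(toList());
        }

        public static void main(String[] args) {
            // Two tables each honored LIMIT 2; the union is trimmed back to 3.
            System.out.println(firstN(List.of(List.of(1, 2), List.of(3, 4)), 3)); // [1, 2, 3]
        }
    }
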
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileThreadSnapshotQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileThreadSnapshotQueryDAO.java
index df8b14a3a3..5370a5b43b 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileThreadSnapshotQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCProfileThreadSnapshotQueryDAO.java
@@ -20,93 +20,122 @@ package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
 import com.google.common.base.Strings;
 import lombok.RequiredArgsConstructor;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Base64;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-import org.apache.skywalking.oap.server.library.util.StringUtil;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
 import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
 import org.apache.skywalking.oap.server.core.profiling.trace.ProfileThreadSnapshotRecord;
 import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
 import org.apache.skywalking.oap.server.core.storage.profiling.trace.IProfileThreadSnapshotQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.util.BooleanUtils;
-import org.apache.skywalking.oap.server.library.util.CollectionUtils;
+import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.stream.IntStream;
+
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toList;
 
 @RequiredArgsConstructor
 public class JDBCProfileThreadSnapshotQueryDAO implements IProfileThreadSnapshotQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
-    public List<BasicTrace> queryProfiledSegments(String taskId) throws IOException {
-        // search segment id list
+    @SneakyThrows
+    public List<BasicTrace> queryProfiledSegments(String taskId) {
+        final var tables = tableHelper.getTablesWithinTTL(ProfileThreadSnapshotRecord.INDEX_NAME);
+        final var results = new ArrayList<BasicTrace>();
+        final var segments = new ArrayList<String>();
+
+        for (String table : tables) {
+            segments.addAll(querySegments(taskId, table));
+        }
+
+        if (segments.isEmpty()) {
+            return Collections.emptyList();
+        }
+
+        final var segmentTables = tableHelper.getTablesWithinTTL(SegmentRecord.INDEX_NAME);
+        for (String table : segmentTables) {
+            final var sql = new StringBuilder();
+            final var parameters = new ArrayList<>();
+
+            sql.append("select * from ").append(table).append(" where ")
+                .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? and ");
+            parameters.add(SegmentRecord.INDEX_NAME);
+
+            final var segmentQuery =
+                segments
+                    .stream()
+                    .map(it -> SegmentRecord.SEGMENT_ID + " = ? ")
+                    .collect(joining(" or ", "(", ")"));
+            sql.append(segmentQuery);
+            parameters.addAll(segments);
+            sql.append(" order by ").append(SegmentRecord.START_TIME).append(" ").append("desc");
+
+            final var sqlAndParameters = new SQLAndParameters(sql.toString(), parameters);
+
+            jdbcClient.executeQuery(
+                sqlAndParameters.sql(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        BasicTrace basicTrace = new BasicTrace();
+
+                        basicTrace.setSegmentId(resultSet.getString(SegmentRecord.SEGMENT_ID));
+                        basicTrace.setStart(resultSet.getString(SegmentRecord.START_TIME));
+                        basicTrace.getEndpointNames().add(
+                            IDManager.EndpointID.analysisId(
+                                resultSet.getString(SegmentRecord.ENDPOINT_ID)).getEndpointName()
+                        );
+                        basicTrace.setDuration(resultSet.getInt(SegmentRecord.LATENCY));
+                        basicTrace.setError(BooleanUtils.valueToBoolean(resultSet.getInt(SegmentRecord.IS_ERROR)));
+                        String traceIds = resultSet.getString(SegmentRecord.TRACE_ID);
+                        basicTrace.getTraceIds().add(traceIds);
+
+                        results.add(basicTrace);
+                    }
+                    return null;
+                },
+                sqlAndParameters.parameters());
+        }
+        return results
+            .stream()
+            .sorted(Comparator.<BasicTrace, Long>comparing(it -> Long.parseLong(it.getStart())).reversed())
+            .collect(toList());
+    }
+
+    protected ArrayList<String> querySegments(String taskId, String table) throws SQLException {
         StringBuilder sql = new StringBuilder();
         sql.append("select ")
            .append(ProfileThreadSnapshotRecord.SEGMENT_ID)
            .append(" from ")
-           .append(ProfileThreadSnapshotRecord.INDEX_NAME);
+           .append(table);
 
         sql.append(" where ")
+           .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?")
+           .append(" and ")
            .append(ProfileThreadSnapshotRecord.TASK_ID)
            .append(" = ? and ")
            .append(ProfileThreadSnapshotRecord.SEQUENCE)
            .append(" = 0");
 
-        final LinkedList<String> segments = new LinkedList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(connection, sql.toString(), taskId)) {
-                while (resultSet.next()) {
-                    segments.add(resultSet.getString(ProfileThreadSnapshotRecord.SEGMENT_ID));
-                }
+        return jdbcClient.executeQuery(sql.toString(), resultSet -> {
+            final var segments = new ArrayList<String>();
+            while (resultSet.next()) {
+                segments.add(resultSet.getString(ProfileThreadSnapshotRecord.SEGMENT_ID));
             }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
-
-        if (CollectionUtils.isEmpty(segments)) {
-            return Collections.emptyList();
-        }
-
-        // search traces
-        sql = new StringBuilder();
-        sql.append("select * from ").append(SegmentRecord.INDEX_NAME).append(" where ");
-        for (int i = 0; i < segments.size(); i++) {
-            sql.append(i > 0 ? " or " : "").append(SegmentRecord.SEGMENT_ID).append(" = ? ");
-        }
-        sql.append(" order by ").append(SegmentRecord.START_TIME).append(" ").append("desc");
-
-        ArrayList<BasicTrace> result = new ArrayList<>(segments.size());
-        try (Connection connection = jdbcClient.getConnection()) {
-
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), segments.toArray(new String[segments.size()]))) {
-                while (resultSet.next()) {
-                    BasicTrace basicTrace = new BasicTrace();
-
-                    basicTrace.setSegmentId(resultSet.getString(SegmentRecord.SEGMENT_ID));
-                    basicTrace.setStart(resultSet.getString(SegmentRecord.START_TIME));
-                    basicTrace.getEndpointNames().add(
-                        IDManager.EndpointID.analysisId(
-                            resultSet.getString(SegmentRecord.ENDPOINT_ID)).getEndpointName()
-                    );
-                    basicTrace.setDuration(resultSet.getInt(SegmentRecord.LATENCY));
-                    basicTrace.setError(BooleanUtils.valueToBoolean(resultSet.getInt(SegmentRecord.IS_ERROR)));
-                    String traceIds = resultSet.getString(SegmentRecord.TRACE_ID);
-                    basicTrace.getTraceIds().add(traceIds);
-
-                    result.add(basicTrace);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
-        return result;
+            return segments;
+        }, ProfileThreadSnapshotRecord.INDEX_NAME, taskId);
     }
 
     @Override
@@ -120,26 +149,29 @@ public class JDBCProfileThreadSnapshotQueryDAO implements IProfileThreadSnapshot
     }
 
     @Override
+    @SneakyThrows
     public List<ProfileThreadSnapshotRecord> queryRecords(String segmentId,
                                                           int minSequence,
                                                           int maxSequence) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        sql.append("select * from ").append(ProfileThreadSnapshotRecord.INDEX_NAME).append(" where ");
-        sql.append(" 1=1 ");
-        sql.append(" and ").append(ProfileThreadSnapshotRecord.SEGMENT_ID).append(" = ? ");
-        sql.append(" and ").append(ProfileThreadSnapshotRecord.SEQUENCE).append(" >= ? ");
-        sql.append(" and ").append(ProfileThreadSnapshotRecord.SEQUENCE).append(" < ? ");
-
-        Object[] params = new Object[] {
-            segmentId,
-            minSequence,
-            maxSequence
-        };
-
-        ArrayList<ProfileThreadSnapshotRecord> result = new ArrayList<>(maxSequence - minSequence);
-        try (Connection connection = jdbcClient.getConnection()) {
-
-            try (ResultSet resultSet = jdbcClient.executeQuery(connection, sql.toString(), params)) {
+        final var tables = tableHelper.getTablesWithinTTL(ProfileThreadSnapshotRecord.INDEX_NAME);
+        final var results = new ArrayList<ProfileThreadSnapshotRecord>();
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            sql.append("select * from ").append(table).append(" where ");
+            sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+            sql.append(" and ").append(ProfileThreadSnapshotRecord.SEGMENT_ID).append(" = ? ");
+            sql.append(" and ").append(ProfileThreadSnapshotRecord.SEQUENCE).append(" >= ? ");
+            sql.append(" and ").append(ProfileThreadSnapshotRecord.SEQUENCE).append(" < ? ");
+
+            Object[] params = new Object[]{
+                ProfileThreadSnapshotRecord.INDEX_NAME,
+                segmentId,
+                minSequence,
+                maxSequence
+            };
+
+            jdbcClient.executeQuery(sql.toString(), resultSet -> {
                 while (resultSet.next()) {
                     ProfileThreadSnapshotRecord record = new ProfileThreadSnapshotRecord();
 
@@ -152,78 +184,92 @@ public class JDBCProfileThreadSnapshotQueryDAO implements IProfileThreadSnapshot
                         record.setStackBinary(Base64.getDecoder().decode(dataBinaryBase64));
                     }
 
-                    result.add(record);
+                    results.add(record);
                 }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+                return null;
+            }, params);
         }
 
-        return result;
+        return results;
     }
 
     @Override
+    @SneakyThrows
     public SegmentRecord getProfiledSegment(String segmentId) throws IOException {
-        try (Connection connection = jdbcClient.getConnection()) {
-
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, "select * from " + SegmentRecord.INDEX_NAME + " where " + SegmentRecord.SEGMENT_ID + " = ?",
-                segmentId
-            )) {
-                if (resultSet.next()) {
-                    SegmentRecord segmentRecord = new SegmentRecord();
-                    segmentRecord.setSegmentId(resultSet.getString(SegmentRecord.SEGMENT_ID));
-                    segmentRecord.setTraceId(resultSet.getString(SegmentRecord.TRACE_ID));
-                    segmentRecord.setServiceId(resultSet.getString(SegmentRecord.SERVICE_ID));
-                    segmentRecord.setServiceInstanceId(resultSet.getString(SegmentRecord.SERVICE_INSTANCE_ID));
-                    segmentRecord.setStartTime(resultSet.getLong(SegmentRecord.START_TIME));
-                    segmentRecord.setLatency(resultSet.getInt(SegmentRecord.LATENCY));
-                    segmentRecord.setIsError(resultSet.getInt(SegmentRecord.IS_ERROR));
-                    String dataBinaryBase64 = resultSet.getString(SegmentRecord.DATA_BINARY);
-                    if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
-                        segmentRecord.setDataBinary(Base64.getDecoder().decode(dataBinaryBase64));
+        final var tables = tableHelper.getTablesWithinTTL(SegmentRecord.INDEX_NAME);
+        for (final var table : tables) {
+            final var r = jdbcClient.executeQuery(
+                "select * from " + table +
+                    " where " + JDBCTableInstaller.TABLE_COLUMN + " = ?" +
+                    " and " + SegmentRecord.SEGMENT_ID + " = ?",
+                resultSet -> {
+                    if (resultSet.next()) {
+                        SegmentRecord segmentRecord = new SegmentRecord();
+                        segmentRecord.setSegmentId(resultSet.getString(SegmentRecord.SEGMENT_ID));
+                        segmentRecord.setTraceId(resultSet.getString(SegmentRecord.TRACE_ID));
+                        segmentRecord.setServiceId(resultSet.getString(SegmentRecord.SERVICE_ID));
+                        segmentRecord.setServiceInstanceId(resultSet.getString(SegmentRecord.SERVICE_INSTANCE_ID));
+                        segmentRecord.setStartTime(resultSet.getLong(SegmentRecord.START_TIME));
+                        segmentRecord.setLatency(resultSet.getInt(SegmentRecord.LATENCY));
+                        segmentRecord.setIsError(resultSet.getInt(SegmentRecord.IS_ERROR));
+                        String dataBinaryBase64 = resultSet.getString(SegmentRecord.DATA_BINARY);
+                        if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
+                            segmentRecord.setDataBinary(Base64.getDecoder().decode(dataBinaryBase64));
+                        }
+                        return segmentRecord;
                     }
-                    return segmentRecord;
-                }
+                    return null;
+                },
+                SegmentRecord.INDEX_NAME, segmentId
+            );
+            if (r != null) {
+                return r;
             }
-        } catch (SQLException e) {
-            throw new IOException(e);
         }
-
         return null;
     }
 
+    @SneakyThrows
     private int querySequenceWithAgg(String aggType, String segmentId, long start, long end) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        sql.append("select ")
-           .append(aggType)
-           .append("(")
-           .append(ProfileThreadSnapshotRecord.SEQUENCE)
-           .append(") from ")
-           .append(ProfileThreadSnapshotRecord.INDEX_NAME)
-           .append(" where ");
-        sql.append(" 1=1 ");
-        sql.append(" and ").append(ProfileThreadSnapshotRecord.SEGMENT_ID).append(" = ? ");
-        sql.append(" and ").append(ProfileThreadSnapshotRecord.DUMP_TIME).append(" >= ? ");
-        sql.append(" and ").append(ProfileThreadSnapshotRecord.DUMP_TIME).append(" <= ? ");
-
-        Object[] params = new Object[] {
-            segmentId,
-            start,
-            end
-        };
-
-        try (Connection connection = jdbcClient.getConnection()) {
-
-            try (ResultSet resultSet = jdbcClient.executeQuery(connection, sql.toString(), params)) {
-                while (resultSet.next()) {
-                    return resultSet.getInt(1);
+        final var tables = tableHelper.getTablesWithinTTL(ProfileThreadSnapshotRecord.INDEX_NAME);
+
+        var result = IntStream.builder();
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            sql.append("select ")
+               .append(aggType)
+               .append("(")
+               .append(ProfileThreadSnapshotRecord.SEQUENCE)
+               .append(") from ")
+               .append(table)
+               .append(" where ");
+            sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            sql.append(" and ").append(ProfileThreadSnapshotRecord.SEGMENT_ID).append(" = ? ");
+            sql.append(" and ").append(ProfileThreadSnapshotRecord.DUMP_TIME).append(" >= ? ");
+            sql.append(" and ").append(ProfileThreadSnapshotRecord.DUMP_TIME).append(" <= ? ");
+
+            Object[] params = new Object[]{
+                ProfileThreadSnapshotRecord.INDEX_NAME,
+                segmentId,
+                start,
+                end
+            };
+
+            jdbcClient.executeQuery(sql.toString(), resultSet -> {
+                if (resultSet.next()) {
+                    result.add(resultSet.getInt(1));
                 }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+                return null;
+            }, params);
+        }
+        switch (aggType) {
+            case "min":
+                return result.build().min().orElse(-1);
+            case "max":
+                return result.build().max().orElse(-1);
+            default:
+                throw new UnsupportedOperationException("Unsupported agg type: " + aggType);
         }
-        return -1;
     }
-
 }
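
querySequenceWithAgg above lets each day table compute its local MIN or MAX and then folds the candidates in memory, which is sound because the minimum of per-table minima equals the global minimum (and likewise for maxima). A minimal sketch of that fold:

    import java.util.stream.IntStream;

    public final class PerTableAgg {
        // The minimum of per-table minima is the global minimum; -1 mirrors
        // the "no rows anywhere" fallback used above.
        static int globalMin(int[] perTableMins) {
            return IntStream.of(perTableMins).min().orElse(-1);
        }

        static int globalMax(int[] perTableMaxes) {
            return IntStream.of(perTableMaxes).max().orElse(-1);
        }

        public static void main(String[] args) {
            System.out.println(globalMin(new int[] {7, 3, 9})); // 3
            System.out.println(globalMax(new int[] {7, 3, 9})); // 9
            System.out.println(globalMin(new int[] {}));        // -1
        }
    }
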
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCRecordDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCRecordDAO.java
index f382f746e1..1c2467a8d9 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCRecordDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCRecordDAO.java
@@ -18,23 +18,24 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
+import lombok.RequiredArgsConstructor;
 import org.apache.skywalking.oap.server.core.analysis.record.Record;
 import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.core.storage.type.HashMapConverter;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
-import lombok.RequiredArgsConstructor;
+
+import java.io.IOException;
 
 @RequiredArgsConstructor
 public class JDBCRecordDAO extends JDBCSQLExecutor implements IRecordDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
     private final StorageBuilder<Record> storageBuilder;
 
     @Override
     public InsertRequest prepareBatchInsert(Model model, Record record) throws IOException {
-        return getInsertExecutor(model.getName(), record, storageBuilder, new HashMapConverter.ToStorage(), null);
+        return getInsertExecutor(model, record, record.getTimeBucket(), storageBuilder, new HashMapConverter.ToStorage(), null);
     }
 }
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCRecordsQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCRecordsQueryDAO.java
index 1a445ac51f..ee57e16659 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCRecordsQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCRecordsQueryDAO.java
@@ -18,33 +18,72 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.topn.TopN;
 import org.apache.skywalking.oap.server.core.query.enumeration.Order;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.query.input.RecordCondition;
 import org.apache.skywalking.oap.server.core.query.type.Record;
 import org.apache.skywalking.oap.server.core.storage.query.IRecordsQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
-import lombok.RequiredArgsConstructor;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.util.ArrayList;
+import java.util.List;
 
 @RequiredArgsConstructor
 public class JDBCRecordsQueryDAO implements IRecordsQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
+    @SneakyThrows
     public List<Record> readRecords(final RecordCondition condition,
                                     final String valueColumnName,
-                                    final Duration duration) throws IOException {
-        StringBuilder sql = new StringBuilder("select * from " + condition.getName() + " where ");
+                                    final Duration duration) {
+        final var tables = tableHelper.getTablesForRead(
+            condition.getName(),
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+        final var results = new ArrayList<Record>();
+
+        for (String table : tables) {
+            final var sqlAndParameters = buildSQL(condition, valueColumnName, duration, table);
+            jdbcClient.executeQuery(
+                sqlAndParameters.sql(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        Record record = new Record();
+                        record.setName(resultSet.getString(TopN.STATEMENT));
+                        final String refId = resultSet.getString(TopN.TRACE_ID);
+                        record.setRefId(StringUtil.isEmpty(refId) ? "" : refId);
+                        record.setId(record.getRefId());
+                        record.setValue(String.valueOf(resultSet.getInt(valueColumnName)));
+                        results.add(record);
+                    }
+                    return null;
+                },
+                sqlAndParameters.parameters());
+        }
+
+        return results;
+    }
+
+    protected static SQLAndParameters buildSQL(
+        RecordCondition condition,
+        String valueColumnName,
+        Duration duration,
+        String table) {
+        StringBuilder sql = new StringBuilder("select * from " + table + " where ");
         List<Object> parameters = new ArrayList<>(10);
-        sql.append(" ").append(TopN.ENTITY_ID).append(" = ? and");
+        sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+        parameters.add(condition.getName());
+        sql.append(" and ").append(TopN.ENTITY_ID).append(" = ? and");
         parameters.add(condition.getParentEntity().buildId());
         sql.append(" ").append(TopN.TIME_BUCKET).append(" >= ?");
         parameters.add(duration.getStartTimeBucketInSec());
@@ -59,25 +98,6 @@ public class JDBCRecordsQueryDAO implements IRecordsQueryDAO {
         }
         sql.append(" limit ").append(condition.getTopN());
 
-        List<Record> results = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, sql.toString(), parameters.toArray(new Object[0]))) {
-                while (resultSet.next()) {
-                    Record record = new Record();
-                    record.setName(resultSet.getString(TopN.STATEMENT));
-                    final String refId = resultSet.getString(TopN.TRACE_ID);
-                    record.setRefId(StringUtil.isEmpty(refId) ? "" : refId);
-                    record.setId(record.getRefId());
-                    record.setValue(resultSet.getString(valueColumnName));
-                    results.add(record);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
-
-        return results;
+        return new SQLAndParameters(sql.toString(), parameters);
     }
-
 }
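
Splitting buildSQL out as a protected method that returns SQLAndParameters keeps statement construction pure and testable per table. Below is a minimal sketch of such a SQL-plus-parameters value object, assuming nothing about SkyWalking's actual SQLAndParameters class beyond the sql()/parameters() accessors used above.

    import java.util.ArrayList;
    import java.util.List;

    public final class SqlAndParams {
        private final String sql;
        private final List<Object> parameters;

        public SqlAndParams(String sql, List<Object> parameters) {
            this.sql = sql;
            this.parameters = new ArrayList<>(parameters); // defensive copy keeps the pair immutable
        }

        public String sql() {
            return sql;
        }

        public Object[] parameters() {
            return parameters.toArray(); // JDBC-friendly shape for PreparedStatement binding
        }
    }
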
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCSQLExecutor.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCSQLExecutor.java
index 6db79f18b2..5697712a1f 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCSQLExecutor.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCSQLExecutor.java
@@ -18,88 +18,74 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.skywalking.oap.server.core.UnexpectedException;
 import org.apache.skywalking.oap.server.core.storage.SessionCacheCallback;
 import org.apache.skywalking.oap.server.core.storage.StorageData;
+import org.apache.skywalking.oap.server.core.storage.model.ColumnName;
 import org.apache.skywalking.oap.server.core.storage.model.Model;
 import org.apache.skywalking.oap.server.core.storage.model.ModelColumn;
-import org.apache.skywalking.oap.server.core.storage.model.SQLDatabaseModelExtension;
 import org.apache.skywalking.oap.server.core.storage.type.Convert2Storage;
 import org.apache.skywalking.oap.server.core.storage.type.HashMapConverter;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
 import org.apache.skywalking.oap.server.core.storage.type.StorageDataComplexObject;
-import org.apache.skywalking.oap.server.library.client.jdbc.JDBCClientException;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLBuilder;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.TableMetaInfo;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 @Slf4j
 public class JDBCSQLExecutor {
-    protected <T extends StorageData> List<StorageData> getByIDs(JDBCHikariCPClient h2Client,
+    protected <T extends StorageData> List<StorageData> getByIDs(JDBCClient h2Client,
                                                                  String modelName,
-                                                                 String[] ids,
-                                                                 StorageBuilder<T> storageBuilder) throws IOException {
+                                                                 List<String> ids,
+                                                                 StorageBuilder<T> storageBuilder) throws Exception {
+        final var tables = getModelTables(h2Client, modelName);
+        final var storageDataList = new ArrayList<StorageData>();
 
-        try (Connection connection = h2Client.getConnection()) {
-            SQLBuilder sql = new SQLBuilder("SELECT * FROM " + modelName + " WHERE id in (");
-            List<Object> parameters = new ArrayList<>(ids.length);
-            for (int i = 0; i < ids.length; i++) {
-                if (i == 0) {
-                    sql.append("?");
-                } else {
-                    sql.append(",?");
-                }
-                parameters.add(ids[i]);
-            }
-            sql.append(")");
-            try (ResultSet rs = h2Client.executeQuery(connection, sql.toString(), parameters.toArray(new Object[0]))) {
+        for (var table : tables) {
+            final var sql = new SQLBuilder("SELECT * FROM " + table + " WHERE id in ")
+                .append(ids.stream().map(it -> "?").collect(Collectors.joining(",", "(", ")")));
+            h2Client.executeQuery(sql.toString(), resultSet -> {
                 StorageData storageData;
-                List<StorageData> storageDataList = new ArrayList<>();
-                do {
-                    storageData = toStorageData(rs, modelName, storageBuilder);
-                    if (storageData != null) {
-                        storageDataList.add(storageData);
-                    }
+                while ((storageData = toStorageData(resultSet, modelName, storageBuilder)) != null) {
+                    storageDataList.add(storageData);
                 }
-                while (storageData != null);
 
-                return storageDataList;
-            }
-        } catch (SQLException | JDBCClientException e) {
-            throw new IOException(e.getMessage(), e);
+                return null;
+            }, ids.toArray());
         }
+        return storageDataList;
     }
 
-    protected <T extends StorageData> StorageData getByID(JDBCHikariCPClient h2Client, String modelName, String id,
-                                                          StorageBuilder<T> storageBuilder) throws IOException {
-        try (Connection connection = h2Client.getConnection();
-             ResultSet rs = h2Client.executeQuery(connection, "SELECT * FROM " + modelName + " WHERE id = ?", id)) {
-            return toStorageData(rs, modelName, storageBuilder);
-        } catch (SQLException | JDBCClientException e) {
-            throw new IOException(e.getMessage(), e);
-        }
-    }
-
-    protected StorageData getByColumn(JDBCHikariCPClient h2Client, String modelName, String columnName, Object value,
-                                      StorageBuilder<? extends StorageData> storageBuilder) throws IOException {
-        try (Connection connection = h2Client.getConnection();
-             ResultSet rs = h2Client.executeQuery(
-                 connection, "SELECT * FROM " + modelName + " WHERE " + columnName + " = ?", value)) {
-            return toStorageData(rs, modelName, storageBuilder);
-        } catch (SQLException | JDBCClientException e) {
-            throw new IOException(e.getMessage(), e);
+    @SneakyThrows
+    protected <T extends StorageData> StorageData getByID(JDBCClient h2Client, String modelName, String id,
+                                                          StorageBuilder<T> storageBuilder) {
+        final var tables = getModelTables(h2Client, modelName);
+        for (var table : tables) {
+            final var result = h2Client.executeQuery(
+                "SELECT * FROM " + table + " WHERE id = ?",
+                resultSet -> toStorageData(resultSet, modelName, storageBuilder),
+                TableHelper.generateId(modelName, id)
+            );
+            if (result != null) {
+                return result;
+            }
         }
+        return null;
     }
 
     protected StorageData toStorageData(ResultSet rs, String modelName,
@@ -115,11 +101,11 @@ public class JDBCSQLExecutor {
         return null;
     }
 
-    protected <T extends StorageData> SQLExecutor getInsertExecutor(String modelName, T metrics,
+    protected <T extends StorageData> SQLExecutor getInsertExecutor(Model model, T metrics,
+                                                                    long timeBucket,
                                                                     StorageBuilder<T> storageBuilder,
                                                                     Convert2Storage<Map<String, Object>> converter,
                                                                     SessionCacheCallback callback) throws IOException {
-        Model model = TableMetaInfo.get(modelName);
         storageBuilder.entity2Storage(metrics, converter);
         Map<String, Object> objectMap = converter.obtain();
         //build main table sql
@@ -128,64 +114,76 @@ public class JDBCSQLExecutor {
             mainEntity.put(column.getColumnName().getName(), objectMap.get(column.getColumnName().getName()));
         });
         SQLExecutor sqlExecutor = buildInsertExecutor(
-            modelName, model.getColumns(), metrics, mainEntity, callback);
+            model, metrics, timeBucket, mainEntity, callback);
         //build additional table sql
-        for (SQLDatabaseModelExtension.AdditionalTable additionalTable : model.getSqlDBModelExtension()
-                                                                              .getAdditionalTables()
-                                                                              .values()) {
+        for (final var additionalTable : model.getSqlDBModelExtension().getAdditionalTables().values()) {
             Map<String, Object> additionalEntity = new HashMap<>();
             additionalTable.getColumns().forEach(column -> {
                 additionalEntity.put(column.getColumnName().getName(), objectMap.get(column.getColumnName().getName()));
             });
 
             List<SQLExecutor> additionalSQLExecutors = buildAdditionalInsertExecutor(
-                additionalTable.getName(), additionalTable.getColumns(), metrics, additionalEntity, callback
+                model, additionalTable.getName(), additionalTable.getColumns(), metrics,
+                timeBucket, additionalEntity, callback
             );
             sqlExecutor.appendAdditionalSQLs(additionalSQLExecutors);
         }
         return sqlExecutor;
     }
 
-    private <T extends StorageData> SQLExecutor buildInsertExecutor(String tableName,
-                                                                    List<ModelColumn> columns,
+    private <T extends StorageData> SQLExecutor buildInsertExecutor(Model model,
                                                                     T metrics,
+                                                                    long timeBucket,
                                                                     Map<String, Object> objectMap,
-                                                                    SessionCacheCallback onCompleteCallback) throws IOException {
-        SQLBuilder sqlBuilder = new SQLBuilder("INSERT INTO " + tableName + " VALUES");
-        List<Object> param = new ArrayList<>();
-        sqlBuilder.append("(?,");
-        param.add(metrics.id().build());
-        for (int i = 0; i < columns.size(); i++) {
-            ModelColumn column = columns.get(i);
-            sqlBuilder.append("?");
+                                                                    SessionCacheCallback onCompleteCallback) {
+        final var table = TableHelper.getTable(model, timeBucket);
+        final var sqlBuilder = new SQLBuilder("INSERT INTO " + table);
+        final var columns = model.getColumns();
+        final var columnNames =
+            Stream.concat(
+                      Stream.of(JDBCTableInstaller.ID_COLUMN, JDBCTableInstaller.TABLE_COLUMN),
+                      columns
+                          .stream()
+                          .map(ModelColumn::getColumnName)
+                          .map(ColumnName::getStorageName))
+                  .collect(Collectors.toList());
+        sqlBuilder.append(columnNames.stream().collect(Collectors.joining(",", "(", ")")));
+        sqlBuilder.append(" VALUES ");
+        sqlBuilder.append(columnNames.stream().map(it -> "?").collect(Collectors.joining(",", "(", ")")));
 
-            Object value = objectMap.get(column.getColumnName().getName());
-            if (value instanceof StorageDataComplexObject) {
-                param.add(((StorageDataComplexObject) value).toStorageData());
-            } else {
-                param.add(value);
-            }
-
-            if (i != columns.size() - 1) {
-                sqlBuilder.append(",");
-            }
-        }
-        sqlBuilder.append(")");
+        final var param =
+            Stream.concat(
+                      Stream.of(TableHelper.generateId(model, metrics.id().build()), model.getName()),
+                      columns
+                          .stream()
+                          .map(ModelColumn::getColumnName)
+                          .map(ColumnName::getName)
+                          .map(objectMap::get)
+                          .map(it -> {
+                              if (it instanceof StorageDataComplexObject) {
+                                  return ((StorageDataComplexObject) it).toStorageData();
+                              }
+                              return it;
+                          }))
+                  .collect(Collectors.toList());
 
         return new SQLExecutor(sqlBuilder.toString(), param, onCompleteCallback);
     }
 
-    private <T extends StorageData> List<SQLExecutor> buildAdditionalInsertExecutor(String tableName,
+    private <T extends StorageData> List<SQLExecutor> buildAdditionalInsertExecutor(Model model, String tableName,
                                                                                     List<ModelColumn> columns,
                                                                                     T metrics,
+                                                                                    long timeBucket,
                                                                                     Map<String, Object> objectMap,
-                                                                                    SessionCacheCallback callback) throws IOException {
+                                                                                    SessionCacheCallback callback) {
 
         List<SQLExecutor> sqlExecutors = new ArrayList<>();
-        SQLBuilder sqlBuilder = new SQLBuilder("INSERT INTO " + tableName + " VALUES");
+        final var sqlBuilder = new SQLBuilder("INSERT INTO ")
+            .append(TableHelper.getTable(tableName, timeBucket))
+            .append(" VALUES ");
         List<Object> param = new ArrayList<>();
         sqlBuilder.append("(?,");
-        param.add(metrics.id().build());
+        param.add(TableHelper.generateId(model, metrics.id().build()));
         int position = 0;
         List valueList = new ArrayList();
         for (int i = 0; i < columns.size(); i++) {
@@ -224,41 +222,46 @@ public class JDBCSQLExecutor {
         return sqlExecutors;
     }
 
-    protected <T extends StorageData> SQLExecutor getUpdateExecutor(String modelName, T metrics,
+    protected <T extends StorageData> SQLExecutor getUpdateExecutor(Model model, T metrics,
+                                                                    long timeBucket,
                                                                     StorageBuilder<T> storageBuilder,
-                                                                    SessionCacheCallback callback) throws IOException {
-        final HashMapConverter.ToStorage toStorage = new HashMapConverter.ToStorage();
+                                                                    SessionCacheCallback callback) {
+        final var toStorage = new HashMapConverter.ToStorage();
         storageBuilder.entity2Storage(metrics, toStorage);
-        Map<String, Object> objectMap = toStorage.obtain();
-
-        StringBuilder sqlBuilder = new StringBuilder("UPDATE " + modelName + " SET ");
-        Model model = TableMetaInfo.get(modelName);
-        List<ModelColumn> columns = model.getColumns();
-        List<Object> param = new ArrayList<>();
-        for (int i = 0; i < columns.size(); i++) {
-            ModelColumn column = columns.get(i);
-            String columnName = column.getColumnName().getName();
-            if (model.getSqlDBModelExtension().isShardingTable()) {
-                SQLDatabaseModelExtension.Sharding sharding = model.getSqlDBModelExtension().getSharding().orElseThrow(
-                    () -> new UnexpectedException("Sharding should not be empty."));
-                if (columnName.equals(sharding.getDataSourceShardingColumn()) || columnName.equals(
-                    sharding.getTableShardingColumn())) {
-                    continue;
-                }
-            }
-            sqlBuilder.append(column.getColumnName().getStorageName()).append("= ?,");
+        final var objectMap = toStorage.obtain();
+        final var table = TableHelper.getTable(model, timeBucket);
+        final var sqlBuilder = new StringBuilder("UPDATE " + table + " SET ");
+        final var columns = model.getColumns();
+        final var queries = new ArrayList<String>();
+        final var param = new ArrayList<>();
+        for (final var column : columns) {
+            final var columnName = column.getColumnName().getName();
+            queries.add(column.getColumnName().getStorageName() + " = ?");
 
-            Object value = objectMap.get(columnName);
+            final var value = objectMap.get(columnName);
             if (value instanceof StorageDataComplexObject) {
                 param.add(((StorageDataComplexObject) value).toStorageData());
             } else {
                 param.add(value);
             }
         }
-        sqlBuilder.replace(sqlBuilder.length() - 1, sqlBuilder.length(), "");
+        sqlBuilder.append(queries.stream().collect(Collectors.joining(", ")));
         sqlBuilder.append(" WHERE id = ?");
-        param.add(metrics.id().build());
+        param.add(TableHelper.generateId(model, metrics.id().build()));
 
         return new SQLExecutor(sqlBuilder.toString(), param, callback);
     }
+
+    private static ArrayList<String> getModelTables(JDBCClient h2Client, String modelName) throws Exception {
+        final var model = TableMetaInfo.get(modelName);
+        final var tableNamePattern = TableHelper.getTableName(model) + "%";
+        final var tables = new ArrayList<String>();
+        try (final var connection = h2Client.getConnection();
+             final var resultSet = connection.getMetaData().getTables(connection.getCatalog(), null, tableNamePattern, new String[]{"TABLE"})) {
+            while (resultSet.next()) {
+                tables.add(resultSet.getString("TABLE_NAME"));
+            }
+        }
+        return tables;
+    }
 }
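
buildInsertExecutor above switches from positional VALUES to an explicit column list, leading with the id and table-discriminator columns (JDBCTableInstaller.ID_COLUMN and TABLE_COLUMN; the literal names "id" and "table_name" below are assumptions) so every row in a shared physical table carries its logical model. A minimal sketch of generating such an INSERT:

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public final class InsertSqlBuilder {
        // Prepend the shared-table bookkeeping columns, then the model's own.
        static String build(String table, List<String> modelColumns) {
            List<String> columns = Stream.concat(
                    Stream.of("id", "table_name"),
                    modelColumns.stream())
                .collect(Collectors.toList());
            String names = columns.stream()
                .collect(Collectors.joining(",", "(", ")"));
            String placeholders = columns.stream()
                .map(c -> "?")
                .collect(Collectors.joining(",", "(", ")"));
            return "INSERT INTO " + table + " " + names + " VALUES " + placeholders;
        }

        public static void main(String[] args) {
            // INSERT INTO records_20230317 (id,table_name,service_id) VALUES (?,?,?)
            System.out.println(build("records_20230317", List.of("service_id")));
        }
    }
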
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCServiceLabelQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCServiceLabelQueryDAO.java
index fe12d9430c..cd5c6ef955 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCServiceLabelQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCServiceLabelQueryDAO.java
@@ -17,39 +17,51 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import lombok.AllArgsConstructor;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.manual.process.ServiceLabelRecord;
 import org.apache.skywalking.oap.server.core.storage.profiling.ebpf.IServiceLabelDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
 
-import java.io.IOException;
-import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 
-@AllArgsConstructor
+@RequiredArgsConstructor
 public class JDBCServiceLabelQueryDAO implements IServiceLabelDAO {
-    private JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
-    public List<String> queryAllLabels(String serviceId) throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(1);
-        sql.append("select " + ServiceLabelRecord.LABEL + " from ")
-                .append(ServiceLabelRecord.INDEX_NAME)
-                .append(" where ").append(ServiceLabelRecord.SERVICE_ID).append(" = ?");
-        condition.add(serviceId);
-
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                    connection, sql.toString(), condition.toArray(new Object[0]))) {
-                return parseLabels(resultSet);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @SneakyThrows
+    public List<String> queryAllLabels(String serviceId) {
+        final var tables = tableHelper.getTablesWithinTTL(ServiceLabelRecord.INDEX_NAME);
+        final var results = new ArrayList<String>();
+
+        for (String table : tables) {
+            final StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(1);
+            sql.append("select " + ServiceLabelRecord.LABEL + " from ")
+               .append(table)
+               .append(" where ")
+               .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?")
+               .append(" and ").append(ServiceLabelRecord.SERVICE_ID).append(" = ?");
+            condition.add(ServiceLabelRecord.INDEX_NAME);
+            condition.add(serviceId);
+
+            results.addAll(
+                jdbcClient.executeQuery(
+                    sql.toString(),
+                    this::parseLabels,
+                    condition.toArray(new Object[0])
+                )
+            );
         }
+
+        return results;
     }
 
     private List<String> parseLabels(ResultSet resultSet) throws SQLException {
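
In this DAO the query now carries two predicates: the usual service_id filter
plus the JDBCTableInstaller.TABLE_COLUMN discriminator, because several logical
models share one merged physical table. The same shape in plain JDBC, with the
discriminator column name assumed for illustration (the project routes this
through its JDBCClient callback API instead):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.List;

    public class MergedTableQuerySketch {
        // Several logical models share one physical table per day, so every query
        // pairs the business predicate with a model-name discriminator (assumed
        // here to be a column literally named "table_name").
        static List<String> queryLabels(Connection conn, String physicalTable, String serviceId)
            throws SQLException {
            String sql = "select label from " + physicalTable
                + " where table_name = ? and service_id = ?";
            List<String> labels = new ArrayList<>();
            try (PreparedStatement ps = conn.prepareStatement(sql)) {
                ps.setString(1, "service_label");  // the logical model's index name
                ps.setString(2, serviceId);
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        labels.add(rs.getString("label"));
                    }
                }
            }
            return labels;
        }
    }
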
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCSpanAttachedEventQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCSpanAttachedEventQueryDAO.java
index fb12dac655..3680ee2560 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCSpanAttachedEventQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCSpanAttachedEventQueryDAO.java
@@ -19,70 +19,91 @@
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
 import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.manual.spanattach.SpanAttachedEventRecord;
 import org.apache.skywalking.oap.server.core.analysis.manual.spanattach.SpanAttachedEventTraceType;
 import org.apache.skywalking.oap.server.core.storage.query.ISpanAttachedEventQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.List;
 
+import static java.util.Comparator.comparing;
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toList;
+
 @RequiredArgsConstructor
 public class JDBCSpanAttachedEventQueryDAO implements ISpanAttachedEventQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
-    public List<SpanAttachedEventRecord> querySpanAttachedEvents(SpanAttachedEventTraceType type, List<String> traceIds) throws IOException {
-        StringBuilder sql = new StringBuilder("select * from " + SpanAttachedEventRecord.INDEX_NAME + " where ");
-        List<Object> parameters = new ArrayList<>(traceIds.size() + 1);
+    @SneakyThrows
+    public List<SpanAttachedEventRecord> querySpanAttachedEvents(SpanAttachedEventTraceType type, List<String> traceIds) {
+        final var tables = tableHelper.getTablesWithinTTL(SpanAttachedEventRecord.INDEX_NAME);
+        final var results = new ArrayList<SpanAttachedEventRecord>();
+
+        for (String table : tables) {
+            final var sqlAndParameters = buildSQL(type, traceIds, table);
+
+            jdbcClient.executeQuery(
+                sqlAndParameters.sql(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        SpanAttachedEventRecord record = new SpanAttachedEventRecord();
+                        record.setStartTimeSecond(resultSet.getLong(SpanAttachedEventRecord.START_TIME_SECOND));
+                        record.setStartTimeNanos(resultSet.getInt(SpanAttachedEventRecord.START_TIME_NANOS));
+                        record.setEvent(resultSet.getString(SpanAttachedEventRecord.EVENT));
+                        record.setEndTimeSecond(resultSet.getLong(SpanAttachedEventRecord.END_TIME_SECOND));
+                        record.setEndTimeNanos(resultSet.getInt(SpanAttachedEventRecord.END_TIME_NANOS));
+                        record.setTraceRefType(resultSet.getInt(SpanAttachedEventRecord.TRACE_REF_TYPE));
+                        record.setRelatedTraceId(resultSet.getString(SpanAttachedEventRecord.RELATED_TRACE_ID));
+                        record.setTraceSegmentId(resultSet.getString(SpanAttachedEventRecord.TRACE_SEGMENT_ID));
+                        record.setTraceSpanId(resultSet.getString(SpanAttachedEventRecord.TRACE_SPAN_ID));
+                        String dataBinaryBase64 = resultSet.getString(SpanAttachedEventRecord.DATA_BINARY);
+                        if (StringUtil.isNotEmpty(dataBinaryBase64)) {
+                            record.setDataBinary(Base64.getDecoder().decode(dataBinaryBase64));
+                        }
+                        results.add(record);
+                    }
 
-        sql.append(" ").append(SpanAttachedEventRecord.RELATED_TRACE_ID).append(" in (");
-        for (int i = 0; i < traceIds.size(); i++) {
-            if (i == 0) {
-                sql.append("?");
-            } else {
-                sql.append(",?");
-            }
-            parameters.add(traceIds.get(i));
+                    return null;
+                },
+                sqlAndParameters.parameters());
         }
-        sql.append(") and ").append(SpanAttachedEventRecord.TRACE_REF_TYPE).append(" = ?");
+        return results
+            .stream()
+            .sorted(comparing(SpanAttachedEventRecord::getStartTimeSecond).thenComparing(SpanAttachedEventRecord::getStartTimeNanos))
+            .collect(toList());
+    }
+
+    private static SQLAndParameters buildSQL(SpanAttachedEventTraceType type, List<String> traceIds, String table) {
+        final var sql = new StringBuilder("select * from " + table + " where ");
+        final var parameters = new ArrayList<>(traceIds.size() + 1);
+
+        sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+        parameters.add(SpanAttachedEventRecord.INDEX_NAME);
+
+        sql.append(" and ").append(SpanAttachedEventRecord.RELATED_TRACE_ID).append(" in ");
+        sql.append(
+            traceIds
+                .stream()
+                .map(it -> "?")
+                .collect(joining(",", "(", ")"))
+        );
+        parameters.addAll(traceIds);
+
+        sql.append(" and ").append(SpanAttachedEventRecord.TRACE_REF_TYPE).append(" = ?");
         parameters.add(type.value());
 
         sql.append(" order by ").append(SpanAttachedEventRecord.START_TIME_SECOND)
-                .append(",").append(SpanAttachedEventRecord.START_TIME_NANOS).append(" ASC ");
-
-        List<SpanAttachedEventRecord> results = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                    connection, sql.toString(), parameters.toArray(new Object[0]))) {
-                while (resultSet.next()) {
-                    SpanAttachedEventRecord record = new SpanAttachedEventRecord();
-                    record.setStartTimeSecond(resultSet.getLong(SpanAttachedEventRecord.START_TIME_SECOND));
-                    record.setStartTimeNanos(resultSet.getInt(SpanAttachedEventRecord.START_TIME_NANOS));
-                    record.setEvent(resultSet.getString(SpanAttachedEventRecord.EVENT));
-                    record.setEndTimeSecond(resultSet.getLong(SpanAttachedEventRecord.END_TIME_SECOND));
-                    record.setEndTimeNanos(resultSet.getInt(SpanAttachedEventRecord.END_TIME_NANOS));
-                    record.setTraceRefType(resultSet.getInt(SpanAttachedEventRecord.TRACE_REF_TYPE));
-                    record.setRelatedTraceId(resultSet.getString(SpanAttachedEventRecord.RELATED_TRACE_ID));
-                    record.setTraceSegmentId(resultSet.getString(SpanAttachedEventRecord.TRACE_SEGMENT_ID));
-                    record.setTraceSpanId(resultSet.getString(SpanAttachedEventRecord.TRACE_SPAN_ID));
-                    String dataBinaryBase64 = resultSet.getString(SpanAttachedEventRecord.DATA_BINARY);
-                    if (StringUtil.isNotEmpty(dataBinaryBase64)) {
-                        record.setDataBinary(Base64.getDecoder().decode(dataBinaryBase64));
-                    }
-                    results.add(record);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
+           .append(",").append(SpanAttachedEventRecord.START_TIME_NANOS).append(" ASC ");
 
-        return results;
+        return new SQLAndParameters(sql.toString(), parameters);
     }
 }
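
Two details of the rewritten DAO are worth noting: the IN-clause placeholders
are generated with Collectors.joining rather than index bookkeeping, and since
each per-table query is ordered independently, the merged result is re-sorted
in memory. A small sketch of both ideas, with event rows simplified to
long[] {seconds, nanos} pairs for illustration:

    import static java.util.stream.Collectors.joining;
    import static java.util.stream.Collectors.toList;

    import java.util.Comparator;
    import java.util.List;
    import java.util.stream.Stream;

    public class InClauseSketch {
        // e.g. placeholders(3) -> "(?,?,?)"
        static String placeholders(int n) {
            return Stream.generate(() -> "?").limit(n).collect(joining(",", "(", ")"));
        }

        // Each per-table result arrives ordered by its own "order by", but the
        // concatenation across tables does not, hence the in-memory re-sort.
        static List<long[]> mergeOrdered(List<long[]> a, List<long[]> b) {
            return Stream.concat(a.stream(), b.stream())
                .sorted(Comparator.<long[]>comparingLong(t -> t[0])   // seconds
                                  .thenComparingLong(t -> t[1]))      // then nanos
                .collect(toList());
        }
    }
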
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCStorageDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCStorageDAO.java
index e14a83463d..d9a4c6844d 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCStorageDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCStorageDAO.java
@@ -29,11 +29,11 @@ import org.apache.skywalking.oap.server.core.storage.INoneStreamDAO;
 import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
 import org.apache.skywalking.oap.server.core.storage.StorageDAO;
 import org.apache.skywalking.oap.server.core.storage.type.StorageBuilder;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 
 @RequiredArgsConstructor
 public class JDBCStorageDAO implements StorageDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
 
     @Override
     public IMetricsDAO newMetricsDao(StorageBuilder storageBuilder) {
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTagAutoCompleteQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTagAutoCompleteQueryDAO.java
index a4d2ad7fc9..6564e26bdf 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTagAutoCompleteQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTagAutoCompleteQueryDAO.java
@@ -18,74 +18,107 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
 import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.TagAutocompleteData;
 import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.TagType;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.storage.query.ITagAutoCompleteQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.SQLAndParameters;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 
 import static java.util.Objects.nonNull;
 
 @RequiredArgsConstructor
 public class JDBCTagAutoCompleteQueryDAO implements ITagAutoCompleteQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
+    @SneakyThrows
     public Set<String> queryTagAutocompleteKeys(final TagType tagType,
                                                 final int limit,
-                                                final Duration duration) throws IOException {
+                                                final Duration duration) {
+        final var tables = tableHelper.getTablesForRead(
+            TagAutocompleteData.INDEX_NAME,
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+        final var results = new HashSet<String>();
+
+        for (String table : tables) {
+            final var sqlAndParameters = buildSQLForQueryKeys(tagType, limit, duration, table);
+            jdbcClient.executeQuery(
+                sqlAndParameters.sql(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        results.add(resultSet.getString(TagAutocompleteData.TAG_KEY));
+                    }
+                    return null;
+                },
+                sqlAndParameters.parameters());
+        }
+        return results;
+    }
+
+    protected SQLAndParameters buildSQLForQueryKeys(TagType tagType, int limit, Duration duration, String table) {
         StringBuilder sql = new StringBuilder();
         List<Object> condition = new ArrayList<>(2);
 
         sql.append("select distinct ").append(TagAutocompleteData.TAG_KEY).append(" from ")
-           .append(TagAutocompleteData.INDEX_NAME).append(" where ");
+           .append(table).append(" where ");
         sql.append(" 1=1 ");
         appendTagAutocompleteCondition(tagType, duration, sql, condition);
         sql.append(" limit ").append(limit);
-        try (Connection connection = jdbcClient.getConnection()) {
-            ResultSet resultSet = jdbcClient.executeQuery(connection, sql.toString(), condition.toArray(new Object[0]));
-            Set<String> tagKeys = new HashSet<>();
-            while (resultSet.next()) {
-                tagKeys.add(resultSet.getString(TagAutocompleteData.TAG_KEY));
-            }
-            return tagKeys;
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
+
+        return new SQLAndParameters(sql.toString(), condition);
     }
 
     @Override
+    @SneakyThrows
     public Set<String> queryTagAutocompleteValues(final TagType tagType,
                                                   final String tagKey,
                                                   final int limit,
-                                                  final Duration duration) throws IOException {
+                                                  final Duration duration) {
+        final var tables = tableHelper.getTablesForRead(
+            TagAutocompleteData.INDEX_NAME,
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+        final var results = new HashSet<String>();
+
+        for (String table : tables) {
+            final var sqlAndParameters = buildSQLForQueryValues(tagType, tagKey, limit, duration, table);
+            jdbcClient.executeQuery(
+                sqlAndParameters.sql(),
+                resultSet -> {
+                    while (resultSet.next()) {
+                        results.add(resultSet.getString(TagAutocompleteData.TAG_VALUE));
+                    }
+                    return null;
+                },
+                sqlAndParameters.parameters()
+            );
+        }
+        return results;
+    }
+
+    protected SQLAndParameters buildSQLForQueryValues(TagType tagType, String tagKey, int limit, Duration duration, String table) {
         StringBuilder sql = new StringBuilder();
         List<Object> condition = new ArrayList<>(3);
-        sql.append("select * from ").append(TagAutocompleteData.INDEX_NAME).append(" where ");
+        sql.append("select * from ").append(table).append(" where ");
         sql.append(TagAutocompleteData.TAG_KEY).append(" = ?");
         condition.add(tagKey);
         appendTagAutocompleteCondition(tagType, duration, sql, condition);
         sql.append(" limit ").append(limit);
 
-        try (Connection connection = jdbcClient.getConnection()) {
-            ResultSet resultSet = jdbcClient.executeQuery(connection, sql.toString(), condition.toArray(new Object[0]));
-            Set<String> tagValues = new HashSet<>();
-            while (resultSet.next()) {
-                tagValues.add(resultSet.getString(TagAutocompleteData.TAG_VALUE));
-            }
-            return tagValues;
-        } catch (SQLException e) {
-            throw new IOException(e);
-        }
+        return new SQLAndParameters(sql.toString(), condition);
     }
 
     private void appendTagAutocompleteCondition(final TagType tagType,
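
getTablesForRead expands the duration's start and end time buckets into the
set of day-suffixed physical tables to scan (getTablesWithinTTL does the same
over the retention window). A hedged approximation of that expansion, assuming
day-granularity buckets and a name_yyyyMMdd suffix; the real TableHelper may
differ:

    import java.time.LocalDate;
    import java.time.format.DateTimeFormatter;
    import java.util.ArrayList;
    import java.util.List;

    public class TableRollingSketch {
        private static final DateTimeFormatter DAY = DateTimeFormatter.BASIC_ISO_DATE; // yyyyMMdd

        // Resolves the physical tables covering [startDay, endDay], e.g.
        // resolve("tag_autocomplete", 20230315, 20230317)
        //   -> [tag_autocomplete_20230315, tag_autocomplete_20230316, tag_autocomplete_20230317]
        static List<String> resolve(String logicalTable, int startDay, int endDay) {
            LocalDate start = LocalDate.parse(Integer.toString(startDay), DAY);
            LocalDate end = LocalDate.parse(Integer.toString(endDay), DAY);
            List<String> tables = new ArrayList<>();
            for (LocalDate d = start; !d.isAfter(end); d = d.plusDays(1)) {
                tables.add(logicalTable + "_" + d.format(DAY));
            }
            return tables;
        }
    }
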
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTopologyQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTopologyQueryDAO.java
index 15a9a8e517..d6d841b853 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTopologyQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTopologyQueryDAO.java
@@ -18,13 +18,8 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
 import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.analysis.manual.relation.endpoint.EndpointRelationServerSideMetrics;
 import org.apache.skywalking.oap.server.core.analysis.manual.relation.instance.ServiceInstanceRelationClientSideMetrics;
 import org.apache.skywalking.oap.server.core.analysis.manual.relation.instance.ServiceInstanceRelationServerSideMetrics;
@@ -38,11 +33,20 @@ import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.query.type.Call;
 import org.apache.skywalking.oap.server.core.source.DetectPoint;
 import org.apache.skywalking.oap.server.core.storage.query.ITopologyQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
 
 @RequiredArgsConstructor
 public class JDBCTopologyQueryDAO implements ITopologyQueryDAO {
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
     @Override
     public List<Call.CallDetail> loadServiceRelationsDetectedAtServerSide(Duration duration,
@@ -132,46 +136,56 @@ public class JDBCTopologyQueryDAO implements ITopologyQueryDAO {
         return loadProcessFromSide(duration, serviceInstanceId, DetectPoint.SERVER);
     }
 
+    @SneakyThrows
     private List<Call.CallDetail> loadServiceCalls(String tableName,
                                                    Duration duration,
                                                    String sourceCName,
                                                    String destCName,
                                                    List<String> serviceIds,
-                                                   DetectPoint detectPoint) throws IOException {
-        Object[] conditions = new Object[serviceIds.size() * 2 + 2];
-        conditions[0] = duration.getStartTimeBucket();
-        conditions[1] = duration.getEndTimeBucket();
-        StringBuilder serviceIdMatchSql = new StringBuilder();
-        if (serviceIds.size() > 0) {
-            serviceIdMatchSql.append("and (");
-            for (int i = 0; i < serviceIds.size(); i++) {
-                serviceIdMatchSql.append(sourceCName + "=? or " + destCName + "=? ");
-                conditions[i * 2 + 2] = serviceIds.get(i);
-                conditions[i * 2 + 1 + 2] = serviceIds.get(i);
-                if (i != serviceIds.size() - 1) {
-                    serviceIdMatchSql.append("or ");
+                                                   DetectPoint detectPoint) {
+        final var tables = tableHelper.getTablesForRead(
+            tableName,
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+        final var calls = new ArrayList<Call.CallDetail>();
+
+        for (String table : tables) {
+            Object[] conditions = new Object[serviceIds.size() * 2 + 3];
+            conditions[0] = tableName;
+            conditions[1] = duration.getStartTimeBucket();
+            conditions[2] = duration.getEndTimeBucket();
+            StringBuilder serviceIdMatchSql = new StringBuilder();
+            if (serviceIds.size() > 0) {
+                serviceIdMatchSql.append("and (");
+                for (int i = 0; i < serviceIds.size(); i++) {
+                    serviceIdMatchSql.append(sourceCName + "=? or " + destCName + "=? ");
+                    conditions[i * 2 + 3] = serviceIds.get(i);
+                    conditions[i * 2 + 1 + 3] = serviceIds.get(i);
+                    if (i != serviceIds.size() - 1) {
+                        serviceIdMatchSql.append("or ");
+                    }
                 }
+                serviceIdMatchSql.append(")");
             }
-            serviceIdMatchSql.append(")");
-        }
-        List<Call.CallDetail> calls = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection,
+            jdbcClient.executeQuery(
                 "select " + Metrics.ENTITY_ID + ", " + ServiceRelationServerSideMetrics.COMPONENT_IDS
-                    + " from " + tableName + " where " + Metrics.TIME_BUCKET + ">= ? and "
-                    + Metrics.TIME_BUCKET + "<=? " + serviceIdMatchSql
-                    .toString() +
-                    " group by " + Metrics.ENTITY_ID + "," + ServiceRelationServerSideMetrics.COMPONENT_IDS, conditions
-            )) {
-                buildServiceCalls(resultSet, calls, detectPoint);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+                    + " from " + table + " where " + JDBCTableInstaller.TABLE_COLUMN + " = ? and "
+                    + Metrics.TIME_BUCKET + ">= ? and "
+                    + Metrics.TIME_BUCKET + "<=? " + serviceIdMatchSql +
+                    " group by " + Metrics.ENTITY_ID + "," + ServiceRelationServerSideMetrics.COMPONENT_IDS,
+                resultSet -> {
+                    buildServiceCalls(resultSet, calls, detectPoint);
+                    return null;
+                },
+                conditions
+            );
         }
+
         return calls;
     }
 
+    @SneakyThrows
     private List<Call.CallDetail> loadServiceInstanceCalls(String tableName,
                                                            Duration duration,
                                                            String sourceCName,
@@ -179,92 +193,120 @@ public class JDBCTopologyQueryDAO implements ITopologyQueryDAO {
                                                            String sourceServiceId,
                                                            String destServiceId,
                                                            DetectPoint detectPoint) throws IOException {
-        Object[] conditions = new Object[] {
+        final var tables = tableHelper.getTablesForRead(
+            tableName,
             duration.getStartTimeBucket(),
-            duration.getEndTimeBucket(),
-            sourceServiceId,
-            destServiceId,
-            destServiceId,
-            sourceServiceId
-        };
-        StringBuilder serviceIdMatchSql = new StringBuilder("and ((").append(sourceCName)
-                                                                     .append("=? and ")
-                                                                     .append(descCName)
-                                                                     .append("=?")
-                                                                     .append(") or (")
-                                                                     .append(sourceCName)
-                                                                     .append("=? and ")
-                                                                     .append(descCName)
-                                                                     .append("=?")
-                                                                     .append("))");
+            duration.getEndTimeBucket()
+        );
         List<Call.CallDetail> calls = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection,
+
+        for (String table : tables) {
+            Object[] conditions = new Object[]{
+                tableName,
+                duration.getStartTimeBucket(),
+                duration.getEndTimeBucket(),
+                sourceServiceId,
+                destServiceId,
+                destServiceId,
+                sourceServiceId
+            };
+            StringBuilder serviceIdMatchSql = new StringBuilder("and ((").append(sourceCName)
+                                                                         .append("=? and ")
+                                                                         .append(descCName)
+                                                                         .append("=?")
+                                                                         .append(") or (")
+                                                                         .append(sourceCName)
+                                                                         .append("=? and ")
+                                                                         .append(descCName)
+                                                                         .append("=?")
+                                                                         .append("))");
+            jdbcClient.executeQuery(
                 "select " + Metrics.ENTITY_ID
-                    + " from " + tableName + " where " + Metrics.TIME_BUCKET + ">= ? and " + Metrics.TIME_BUCKET + "<=? " + serviceIdMatchSql
-                    .toString() + " group by " + Metrics.ENTITY_ID,
+                    + " from " + table + " where " + JDBCTableInstaller.TABLE_COLUMN + " = ? and "
+                    + Metrics.TIME_BUCKET + ">= ? and "
+                    + Metrics.TIME_BUCKET + "<=? " + serviceIdMatchSql + " group by " + Metrics.ENTITY_ID,
+                resultSet -> {
+                    buildInstanceCalls(resultSet, calls, detectPoint);
+                    return null;
+                },
                 conditions
-            )) {
-                buildInstanceCalls(resultSet, calls, detectPoint);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+            );
         }
         return calls;
     }
 
+    @SneakyThrows
     private List<Call.CallDetail> loadEndpointFromSide(String tableName,
                                                        Duration duration,
                                                        String sourceCName,
                                                        String destCName,
                                                        String id,
                                                        boolean isSourceId) throws IOException {
-        Object[] conditions = new Object[3];
-        conditions[0] = duration.getStartTimeBucket();
-        conditions[1] = duration.getEndTimeBucket();
-        conditions[2] = id;
         List<Call.CallDetail> calls = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection,
-                "select " + Metrics.ENTITY_ID + " from " + tableName
-                    + " where " + Metrics.TIME_BUCKET + ">= ? and " + Metrics.TIME_BUCKET + "<=? and "
+
+        final var tables = tableHelper.getTablesForRead(
+            tableName,
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+        for (String table : tables) {
+            Object[] conditions = new Object[4];
+            conditions[0] = tableName;
+            conditions[1] = duration.getStartTimeBucket();
+            conditions[2] = duration.getEndTimeBucket();
+            conditions[3] = id;
+            jdbcClient.executeQuery(
+                "select " + Metrics.ENTITY_ID + " from " + table
+                    + " where " + JDBCTableInstaller.TABLE_COLUMN + " = ? and "
+                    + Metrics.TIME_BUCKET + ">= ? and " + Metrics.TIME_BUCKET + "<=? and "
                     + (isSourceId ? sourceCName : destCName) + "=?"
                     + " group by " + Metrics.ENTITY_ID,
+                resultSet -> {
+                    buildEndpointCalls(resultSet, calls, DetectPoint.SERVER);
+                    return null;
+                },
                 conditions
-            )) {
-                buildEndpointCalls(resultSet, calls, DetectPoint.SERVER);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+            );
         }
         return calls;
     }
 
+    @SneakyThrows
     private List<Call.CallDetail> loadProcessFromSide(Duration duration,
                                                        String instanceId,
                                                        DetectPoint detectPoint) throws IOException {
-        Object[] conditions = new Object[3];
-        conditions[0] = duration.getStartTimeBucket();
-        conditions[1] = duration.getEndTimeBucket();
-        conditions[2] = instanceId;
+        final var tableName = detectPoint == DetectPoint.SERVER ?
+            ProcessRelationServerSideMetrics.INDEX_NAME :
+            ProcessRelationClientSideMetrics.INDEX_NAME;
+        final var tables = tableHelper.getTablesForRead(
+            tableName,
+            duration.getStartTimeBucket(),
+            duration.getEndTimeBucket()
+        );
+
         List<Call.CallDetail> calls = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection,
-                "select " + Metrics.ENTITY_ID +  ", " + ProcessRelationServerSideMetrics.COMPONENT_ID
-                    + " from " + (detectPoint == DetectPoint.SERVER ? ProcessRelationServerSideMetrics.INDEX_NAME : ProcessRelationClientSideMetrics.INDEX_NAME)
-                    + " where " + Metrics.TIME_BUCKET + ">= ? and " + Metrics.TIME_BUCKET + "<=? and "
+
+        for (String table : tables) {
+            Object[] conditions = new Object[4];
+            conditions[0] = tableName;
+            conditions[1] = duration.getStartTimeBucket();
+            conditions[2] = duration.getEndTimeBucket();
+            conditions[3] = instanceId;
+            jdbcClient.executeQuery(
+                "select " + Metrics.ENTITY_ID + ", " + ProcessRelationServerSideMetrics.COMPONENT_ID
+                    + " from " + table
+                    + " where " + JDBCTableInstaller.TABLE_COLUMN + " = ? and "
+                    + Metrics.TIME_BUCKET + ">= ? and " + Metrics.TIME_BUCKET + "<=? and "
                     + ProcessRelationClientSideMetrics.SERVICE_INSTANCE_ID + "=?"
                     + " group by " + Metrics.ENTITY_ID + ", " + ProcessRelationServerSideMetrics.COMPONENT_ID,
+                resultSet -> {
+                    buildProcessCalls(resultSet, calls, detectPoint);
+                    return null;
+                },
                 conditions
-            )) {
-                buildProcessCalls(resultSet, calls, detectPoint);
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+            );
         }
+
         return calls;
     }
 
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTraceQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTraceQueryDAO.java
index 3c10b814e8..ad5fc9a811 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTraceQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCTraceQueryDAO.java
@@ -20,45 +20,53 @@ package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
 import com.google.common.base.Strings;
 import lombok.RequiredArgsConstructor;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Base64;
-import java.util.Collections;
-import java.util.List;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.Const;
 import org.apache.skywalking.oap.server.core.CoreModule;
-import org.apache.skywalking.oap.server.core.config.ConfigService;
-import org.apache.skywalking.oap.server.core.query.input.Duration;
-import org.apache.skywalking.oap.server.library.util.StringUtil;
 import org.apache.skywalking.oap.server.core.analysis.IDManager;
 import org.apache.skywalking.oap.server.core.analysis.manual.searchtag.Tag;
 import org.apache.skywalking.oap.server.core.analysis.manual.segment.SegmentRecord;
+import org.apache.skywalking.oap.server.core.config.ConfigService;
+import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.query.type.BasicTrace;
 import org.apache.skywalking.oap.server.core.query.type.QueryOrder;
 import org.apache.skywalking.oap.server.core.query.type.Span;
 import org.apache.skywalking.oap.server.core.query.type.TraceBrief;
 import org.apache.skywalking.oap.server.core.query.type.TraceState;
 import org.apache.skywalking.oap.server.core.storage.query.ITraceQueryDAO;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.module.ModuleManager;
 import org.apache.skywalking.oap.server.library.util.BooleanUtils;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
+import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 
 import static java.util.Objects.nonNull;
-import static org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.H2TableInstaller.ID_COLUMN;
+import static java.util.function.Predicate.not;
+import static java.util.stream.Collectors.toSet;
 
+@Slf4j
 @RequiredArgsConstructor
 public class JDBCTraceQueryDAO implements ITraceQueryDAO {
     private final ModuleManager manager;
-    private final JDBCHikariCPClient jdbcClient;
+    private final JDBCClient jdbcClient;
+    private final TableHelper tableHelper;
 
-    private List<String> searchableTagKeys;
+    private Set<String> searchableTagKeys;
 
     @Override
+    @SneakyThrows
     public TraceBrief queryBasicTraces(Duration duration,
                                        long minDuration,
                                        long maxDuration,
@@ -71,132 +79,140 @@ public class JDBCTraceQueryDAO implements ITraceQueryDAO {
                                        TraceState traceState,
                                        QueryOrder queryOrder,
                                        final List<Tag> tags) throws IOException {
+        if (searchableTagKeys == null) {
+            final ConfigService configService = manager.find(CoreModule.NAME)
+                                                       .provider()
+                                                       .getService(ConfigService.class);
+            searchableTagKeys = new HashSet<>(Arrays.asList(configService.getSearchableTracesTags().split(Const.COMMA)));
+        }
+        if (tags != null && !searchableTagKeys.containsAll(tags.stream().map(Tag::getKey).collect(toSet()))) {
+            log.warn(
+                "Searching tags that are not searchable: {}",
+                tags.stream().map(Tag::getKey).filter(not(searchableTagKeys::contains)).collect(toSet()));
+            return new TraceBrief();
+        }
+
         long startSecondTB = 0;
         long endSecondTB = 0;
         if (nonNull(duration)) {
             startSecondTB = duration.getStartTimeBucketInSec();
             endSecondTB = duration.getEndTimeBucketInSec();
         }
-        if (searchableTagKeys == null) {
-            final ConfigService configService = manager.find(CoreModule.NAME)
-                                                       .provider()
-                                                       .getService(ConfigService.class);
-            searchableTagKeys = Arrays.asList(configService.getSearchableTracesTags().split(Const.COMMA));
-        }
 
-        StringBuilder sql = new StringBuilder();
-        List<Object> parameters = new ArrayList<>(10);
-
-        sql.append("from ").append(SegmentRecord.INDEX_NAME);
-
-        /**
-         * This is an AdditionalEntity feature, see:
-         * {@link org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase.AdditionalEntity}
-         */
-        if (!CollectionUtils.isEmpty(tags)) {
-            for (int i = 0; i < tags.size(); i++) {
-                sql.append(" inner join ").append(SegmentRecord.ADDITIONAL_TAG_TABLE).append(" ");
-                sql.append(SegmentRecord.ADDITIONAL_TAG_TABLE + i);
-                sql.append(" on ").append(SegmentRecord.INDEX_NAME).append(".").append(ID_COLUMN).append(" = ");
-                sql.append(SegmentRecord.ADDITIONAL_TAG_TABLE + i).append(".").append(ID_COLUMN);
+        final var tables = startSecondTB > 0 && endSecondTB > 0 ?
+            tableHelper.getTablesForRead(SegmentRecord.INDEX_NAME, startSecondTB, endSecondTB) :
+            tableHelper.getTablesWithinTTL(SegmentRecord.INDEX_NAME);
+        final var traces = new ArrayList<BasicTrace>();
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> parameters = new ArrayList<>(10);
+
+            sql.append("from ").append(table);
+
+            /*
+             * This is an AdditionalEntity feature, see:
+             * {@link org.apache.skywalking.oap.server.core.storage.annotation.SQLDatabase.AdditionalEntity}
+             */
+            final var timeBucket = TableHelper.getTimeBucket(table);
+            final var tagTable = TableHelper.getTable(SegmentRecord.ADDITIONAL_TAG_TABLE, timeBucket);
+            if (!CollectionUtils.isEmpty(tags)) {
+                for (int i = 0; i < tags.size(); i++) {
+                    sql.append(" inner join ").append(tagTable).append(" ");
+                    sql.append(tagTable + i);
+                    sql.append(" on ").append(table).append(".").append(JDBCTableInstaller.ID_COLUMN).append(" = ");
+                    sql.append(tagTable + i).append(".").append(JDBCTableInstaller.ID_COLUMN);
+                }
             }
-        }
-        sql.append(" where ");
-        sql.append(" 1=1 ");
-        if (startSecondTB != 0 && endSecondTB != 0) {
-            sql.append(" and ").append(SegmentRecord.INDEX_NAME).append(".").append(SegmentRecord.TIME_BUCKET).append(" >= ?");
-            parameters.add(startSecondTB);
-            sql.append(" and ").append(SegmentRecord.INDEX_NAME).append(".").append(SegmentRecord.TIME_BUCKET).append(" <= ?");
-            parameters.add(endSecondTB);
-        }
-        if (minDuration != 0) {
-            sql.append(" and ").append(SegmentRecord.LATENCY).append(" >= ?");
-            parameters.add(minDuration);
-        }
-        if (maxDuration != 0) {
-            sql.append(" and ").append(SegmentRecord.LATENCY).append(" <= ?");
-            parameters.add(maxDuration);
-        }
-        if (StringUtil.isNotEmpty(serviceId)) {
-            sql.append(" and ").append(SegmentRecord.INDEX_NAME).append(".").append(SegmentRecord.SERVICE_ID).append(" = ?");
-            parameters.add(serviceId);
-        }
-        if (StringUtil.isNotEmpty(serviceInstanceId)) {
-            sql.append(" and ").append(SegmentRecord.SERVICE_INSTANCE_ID).append(" = ?");
-            parameters.add(serviceInstanceId);
-        }
-        if (!Strings.isNullOrEmpty(endpointId)) {
-            sql.append(" and ").append(SegmentRecord.ENDPOINT_ID).append(" = ?");
-            parameters.add(endpointId);
-        }
-        if (!Strings.isNullOrEmpty(traceId)) {
-            sql.append(" and ").append(SegmentRecord.TRACE_ID).append(" = ?");
-            parameters.add(traceId);
-        }
-        if (CollectionUtils.isNotEmpty(tags)) {
-            for (int i = 0; i < tags.size(); i++) {
-                final int foundIdx = searchableTagKeys.indexOf(tags.get(i).getKey());
-                if (foundIdx > -1) {
-                    sql.append(" and ").append(SegmentRecord.ADDITIONAL_TAG_TABLE + i).append(".");
+            sql.append(" where ");
+            sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            parameters.add(SegmentRecord.INDEX_NAME);
+            if (startSecondTB != 0 && endSecondTB != 0) {
+                sql.append(" and ").append(table).append(".").append(SegmentRecord.TIME_BUCKET).append(" >= ?");
+                parameters.add(startSecondTB);
+                sql.append(" and ").append(table).append(".").append(SegmentRecord.TIME_BUCKET).append(" <= ?");
+                parameters.add(endSecondTB);
+            }
+            if (minDuration != 0) {
+                sql.append(" and ").append(SegmentRecord.LATENCY).append(" >= ?");
+                parameters.add(minDuration);
+            }
+            if (maxDuration != 0) {
+                sql.append(" and ").append(SegmentRecord.LATENCY).append(" <= ?");
+                parameters.add(maxDuration);
+            }
+            if (StringUtil.isNotEmpty(serviceId)) {
+                sql.append(" and ").append(table).append(".").append(SegmentRecord.SERVICE_ID).append(" = ?");
+                parameters.add(serviceId);
+            }
+            if (StringUtil.isNotEmpty(serviceInstanceId)) {
+                sql.append(" and ").append(SegmentRecord.SERVICE_INSTANCE_ID).append(" = ?");
+                parameters.add(serviceInstanceId);
+            }
+            if (!Strings.isNullOrEmpty(endpointId)) {
+                sql.append(" and ").append(SegmentRecord.ENDPOINT_ID).append(" = ?");
+                parameters.add(endpointId);
+            }
+            if (!Strings.isNullOrEmpty(traceId)) {
+                sql.append(" and ").append(SegmentRecord.TRACE_ID).append(" = ?");
+                parameters.add(traceId);
+            }
+            if (CollectionUtils.isNotEmpty(tags)) {
+                for (int i = 0; i < tags.size(); i++) {
+                    sql.append(" and ").append(tagTable + i).append(".");
                     sql.append(SegmentRecord.TAGS).append(" = ?");
                     parameters.add(tags.get(i).toString());
-                } else {
-                    //If the tag is not searchable, but is required, then don't need to run the real query.
-                    return new TraceBrief();
                 }
             }
-        }
-        switch (traceState) {
-            case ERROR:
-                sql.append(" and ").append(SegmentRecord.IS_ERROR).append(" = ").append(BooleanUtils.TRUE);
-                break;
-            case SUCCESS:
-                sql.append(" and ").append(SegmentRecord.IS_ERROR).append(" = ").append(BooleanUtils.FALSE);
-                break;
-        }
-        switch (queryOrder) {
-            case BY_START_TIME:
-                sql.append(" order by ").append(SegmentRecord.START_TIME).append(" ").append("desc");
-                break;
-            case BY_DURATION:
-                sql.append(" order by ").append(SegmentRecord.LATENCY).append(" ").append("desc");
-                break;
-        }
-
-        TraceBrief traceBrief = new TraceBrief();
-        try (Connection connection = jdbcClient.getConnection()) {
+            switch (traceState) {
+                case ERROR:
+                    sql.append(" and ").append(SegmentRecord.IS_ERROR).append(" = ").append(BooleanUtils.TRUE);
+                    break;
+                case SUCCESS:
+                    sql.append(" and ").append(SegmentRecord.IS_ERROR).append(" = ").append(BooleanUtils.FALSE);
+                    break;
+            }
+            switch (queryOrder) {
+                case BY_START_TIME:
+                    sql.append(" order by ").append(SegmentRecord.START_TIME).append(" ").append("desc");
+                    break;
+                case BY_DURATION:
+                    sql.append(" order by ").append(SegmentRecord.LATENCY).append(" ").append("desc");
+                    break;
+            }
 
             buildLimit(sql, from, limit);
 
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, "select " +
+            jdbcClient.executeQuery(
+                "select " +
                     SegmentRecord.SEGMENT_ID + ", " +
                     SegmentRecord.START_TIME + ", " +
                     SegmentRecord.ENDPOINT_ID + ", " +
                     SegmentRecord.LATENCY + ", " +
                     SegmentRecord.IS_ERROR + ", " +
-                    SegmentRecord.TRACE_ID + " " + sql, parameters.toArray(new Object[0]))) {
-                while (resultSet.next()) {
-                    BasicTrace basicTrace = new BasicTrace();
-
-                    basicTrace.setSegmentId(resultSet.getString(SegmentRecord.SEGMENT_ID));
-                    basicTrace.setStart(resultSet.getString(SegmentRecord.START_TIME));
-                    basicTrace.getEndpointNames().add(
-                        IDManager.EndpointID.analysisId(resultSet.getString(SegmentRecord.ENDPOINT_ID))
-                                            .getEndpointName()
-                    );
-                    basicTrace.setDuration(resultSet.getInt(SegmentRecord.LATENCY));
-                    basicTrace.setError(BooleanUtils.valueToBoolean(resultSet.getInt(SegmentRecord.IS_ERROR)));
-                    String traceIds = resultSet.getString(SegmentRecord.TRACE_ID);
-                    basicTrace.getTraceIds().add(traceIds);
-                    traceBrief.getTraces().add(basicTrace);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+                    SegmentRecord.TRACE_ID + " " + sql,
+                resultSet -> {
+                    while (resultSet.next()) {
+                        BasicTrace basicTrace = new BasicTrace();
+
+                        basicTrace.setSegmentId(resultSet.getString(SegmentRecord.SEGMENT_ID));
+                        basicTrace.setStart(resultSet.getString(SegmentRecord.START_TIME));
+                        basicTrace.getEndpointNames().add(
+                            IDManager.EndpointID.analysisId(resultSet.getString(SegmentRecord.ENDPOINT_ID))
+                                                .getEndpointName()
+                        );
+                        basicTrace.setDuration(resultSet.getInt(SegmentRecord.LATENCY));
+                        basicTrace.setError(BooleanUtils.valueToBoolean(resultSet.getInt(SegmentRecord.IS_ERROR)));
+                        String traceIds = resultSet.getString(SegmentRecord.TRACE_ID);
+                        basicTrace.getTraceIds().add(traceIds);
+                        traces.add(basicTrace);
+                    }
+                    return null;
+                },
+                parameters.toArray(new Object[0]));
         }
 
-        return traceBrief;
+        return new TraceBrief(traces); // TODO: sort the merged per-table results by queryOrder and re-apply the limit
     }
 
     protected void buildLimit(StringBuilder sql, int from, int limit) {
@@ -205,39 +221,43 @@ public class JDBCTraceQueryDAO implements ITraceQueryDAO {
     }
 
     @Override
+    @SneakyThrows
     public List<SegmentRecord> queryByTraceId(String traceId) throws IOException {
-        List<SegmentRecord> segmentRecords = new ArrayList<>();
-        try (Connection connection = jdbcClient.getConnection()) {
+        final var tables = tableHelper.getTablesWithinTTL(SegmentRecord.INDEX_NAME);
+        final var segmentRecords = new ArrayList<SegmentRecord>();
 
-            try (ResultSet resultSet = jdbcClient.executeQuery(
-                connection, "select " + SegmentRecord.SEGMENT_ID + ", " +
+        for (String table : tables) {
+            jdbcClient.executeQuery(
+                "select " + SegmentRecord.SEGMENT_ID + ", " +
                     SegmentRecord.TRACE_ID + ", " +
                     SegmentRecord.SERVICE_ID + ", " +
                     SegmentRecord.SERVICE_INSTANCE_ID + ", " +
                     SegmentRecord.START_TIME + ", " +
                     SegmentRecord.LATENCY + ", " +
                     SegmentRecord.IS_ERROR + ", " +
-                    SegmentRecord.DATA_BINARY + " from " +
-                    SegmentRecord.INDEX_NAME + " where " + SegmentRecord.TRACE_ID + " = ?", traceId
-            )) {
-                while (resultSet.next()) {
-                    SegmentRecord segmentRecord = new SegmentRecord();
-                    segmentRecord.setSegmentId(resultSet.getString(SegmentRecord.SEGMENT_ID));
-                    segmentRecord.setTraceId(resultSet.getString(SegmentRecord.TRACE_ID));
-                    segmentRecord.setServiceId(resultSet.getString(SegmentRecord.SERVICE_ID));
-                    segmentRecord.setServiceInstanceId(resultSet.getString(SegmentRecord.SERVICE_INSTANCE_ID));
-                    segmentRecord.setStartTime(resultSet.getLong(SegmentRecord.START_TIME));
-                    segmentRecord.setLatency(resultSet.getInt(SegmentRecord.LATENCY));
-                    segmentRecord.setIsError(resultSet.getInt(SegmentRecord.IS_ERROR));
-                    String dataBinaryBase64 = resultSet.getString(SegmentRecord.DATA_BINARY);
-                    if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
-                        segmentRecord.setDataBinary(Base64.getDecoder().decode(dataBinaryBase64));
+                    SegmentRecord.DATA_BINARY + " from " + table + " where " +
+                    JDBCTableInstaller.TABLE_COLUMN + " = ? and " +
+                    SegmentRecord.TRACE_ID + " = ?",
+                resultSet -> {
+                    while (resultSet.next()) {
+                        SegmentRecord segmentRecord = new SegmentRecord();
+                        segmentRecord.setSegmentId(resultSet.getString(SegmentRecord.SEGMENT_ID));
+                        segmentRecord.setTraceId(resultSet.getString(SegmentRecord.TRACE_ID));
+                        segmentRecord.setServiceId(resultSet.getString(SegmentRecord.SERVICE_ID));
+                        segmentRecord.setServiceInstanceId(resultSet.getString(SegmentRecord.SERVICE_INSTANCE_ID));
+                        segmentRecord.setStartTime(resultSet.getLong(SegmentRecord.START_TIME));
+                        segmentRecord.setLatency(resultSet.getInt(SegmentRecord.LATENCY));
+                        segmentRecord.setIsError(resultSet.getInt(SegmentRecord.IS_ERROR));
+                        String dataBinaryBase64 = resultSet.getString(SegmentRecord.DATA_BINARY);
+                        if (!Strings.isNullOrEmpty(dataBinaryBase64)) {
+                            segmentRecord.setDataBinary(Base64.getDecoder().decode(dataBinaryBase64));
+                        }
+                        segmentRecords.add(segmentRecord);
                     }
-                    segmentRecords.add(segmentRecord);
-                }
-            }
-        } catch (SQLException e) {
-            throw new IOException(e);
+                    return null;
+                },
+                SegmentRecord.INDEX_NAME, traceId
+            );
         }
         return segmentRecords;
     }
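
For readers following the storage rewrite: every read path above now has the same shape. Physical tables roll by day and multiple logical models share one physical table, so a query first resolves the candidate tables via TableHelper.getTablesWithinTTL(...) and then scopes each scan with the JDBCTableInstaller.TABLE_COLUMN predicate. A minimal sketch of that shape, assuming only the collaborators visible in this diff (tableHelper, jdbcClient); MODEL_NAME, SOME_COLUMN, SOME_KEY, and someKey are hypothetical placeholders, not identifiers from the commit:

    // Sketch of the shared read pattern, not code from this commit.
    // MODEL_NAME / SOME_COLUMN / SOME_KEY stand in for any record model whose
    // rows live in the merged, day-based tables.
    final var results = new ArrayList<String>();
    for (final String table : tableHelper.getTablesWithinTTL(MODEL_NAME)) {
        jdbcClient.executeQuery(
            "select " + SOME_COLUMN + " from " + table +
                " where " + JDBCTableInstaller.TABLE_COLUMN + " = ?" + // scope to the logical model
                " and " + SOME_KEY + " = ?",
            resultSet -> {
                while (resultSet.next()) {
                    results.add(resultSet.getString(SOME_COLUMN));
                }
                return null; // accumulate across every table still within TTL
            },
            MODEL_NAME, someKey);
    }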
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCUITemplateManagementDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCUITemplateManagementDAO.java
index 9267efd21c..81b04c33e5 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCUITemplateManagementDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCUITemplateManagementDAO.java
@@ -18,13 +18,8 @@
 
 package org.apache.skywalking.oap.server.storage.plugin.jdbc.common.dao;
 
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
 import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.skywalking.oap.server.core.management.ui.template.UITemplate;
 import org.apache.skywalking.oap.server.core.query.input.DashboardSetting;
@@ -32,56 +27,78 @@ import org.apache.skywalking.oap.server.core.query.type.DashboardConfiguration;
 import org.apache.skywalking.oap.server.core.query.type.TemplateChangeStatus;
 import org.apache.skywalking.oap.server.core.storage.management.UITemplateManagementDAO;
 import org.apache.skywalking.oap.server.core.storage.type.HashMapConverter;
-import org.apache.skywalking.oap.server.library.client.jdbc.JDBCClientException;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.util.BooleanUtils;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
 import org.apache.skywalking.oap.server.storage.plugin.jdbc.SQLExecutor;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.TableMetaInfo;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
 
 @Slf4j
 @RequiredArgsConstructor
 public class JDBCUITemplateManagementDAO extends JDBCSQLExecutor implements UITemplateManagementDAO {
-    private final JDBCHikariCPClient h2Client;
+    private final JDBCClient h2Client;
+    private final TableHelper tableHelper;
 
     @Override
-    public DashboardConfiguration getTemplate(final String id) throws IOException {
+    @SneakyThrows
+    public DashboardConfiguration getTemplate(final String id) {
         if (StringUtil.isEmpty(id)) {
             return null;
         }
-        final StringBuilder sql = new StringBuilder();
-        final ArrayList<Object> condition = new ArrayList<>(1);
-        sql.append("select * from ").append(UITemplate.INDEX_NAME).append(" where id=? LIMIT 1 ");
-        condition.add(id);
 
-        try (Connection connection = h2Client.getConnection()) {
-            try (ResultSet rs = h2Client.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
+        final var tables = tableHelper.getTablesWithinTTL(UITemplate.INDEX_NAME);
+
+        for (String table : tables) {
+            final StringBuilder sql = new StringBuilder();
+            final ArrayList<Object> condition = new ArrayList<>(2);
+            sql.append("select * from ").append(table).append(" where ")
+               .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?")
+               .append(" and id=? LIMIT 1 ");
+            condition.add(UITemplate.INDEX_NAME);
+            condition.add(id);
+
+            final var result = h2Client.executeQuery(sql.toString(), resultSet -> {
                 final UITemplate.Builder builder = new UITemplate.Builder();
-                UITemplate uiTemplate = (UITemplate) toStorageData(rs, UITemplate.INDEX_NAME, builder);
+                UITemplate uiTemplate = (UITemplate) toStorageData(resultSet, UITemplate.INDEX_NAME, builder);
                 if (uiTemplate != null) {
                     return new DashboardConfiguration().fromEntity(uiTemplate);
                 }
+                return null;
+            }, condition.toArray(new Object[0]));
+            if (result != null) {
+                return result;
             }
-        } catch (SQLException | JDBCClientException e) {
-            throw new IOException(e);
         }
+
         return null;
     }
 
     @Override
-    public List<DashboardConfiguration> getAllTemplates(Boolean includingDisabled) throws IOException {
-        final StringBuilder sql = new StringBuilder();
-        final ArrayList<Object> condition = new ArrayList<>(1);
-        sql.append("select * from ").append(UITemplate.INDEX_NAME).append(" where 1=1 ");
-        if (!includingDisabled) {
-            sql.append(" and ").append(UITemplate.DISABLED).append("=?");
-            condition.add(BooleanUtils.booleanToValue(includingDisabled));
-        }
+    @SneakyThrows
+    public List<DashboardConfiguration> getAllTemplates(Boolean includingDisabled) {
+        final var tables = tableHelper.getTablesWithinTTL(UITemplate.INDEX_NAME);
+        final var configs = new ArrayList<DashboardConfiguration>();
 
-        try (Connection connection = h2Client.getConnection()) {
-            try (ResultSet resultSet = h2Client.executeQuery(
-                connection, sql.toString(), condition.toArray(new Object[0]))) {
-                final List<DashboardConfiguration> configs = new ArrayList<>();
+        for (String table : tables) {
+            final StringBuilder sql = new StringBuilder();
+            final ArrayList<Object> condition = new ArrayList<>(2);
+            sql.append("select * from ").append(table).append(" where ")
+               .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ? ");
+            condition.add(UITemplate.INDEX_NAME);
+            if (!includingDisabled) {
+                sql.append(" and ").append(UITemplate.DISABLED).append("=?");
+                condition.add(BooleanUtils.booleanToValue(includingDisabled));
+            }
+
+            h2Client.executeQuery(sql.toString(), resultSet -> {
                 final UITemplate.Builder builder = new UITemplate.Builder();
                 UITemplate uiTemplate = null;
                 do {
@@ -91,22 +108,23 @@ public class JDBCUITemplateManagementDAO extends JDBCSQLExecutor implements UITe
                     }
                 }
                 while (uiTemplate != null);
-                return configs;
-            }
-        } catch (SQLException | JDBCClientException e) {
-            throw new IOException(e);
+                return null;
+            }, condition.toArray(new Object[0]));
         }
+
+        return configs;
     }
 
     @Override
     public TemplateChangeStatus addTemplate(final DashboardSetting setting) throws IOException {
-        final UITemplate uiTemplate = setting.toEntity();
+        final var uiTemplate = setting.toEntity();
+        final var model = TableMetaInfo.get(UITemplate.INDEX_NAME);
         final SQLExecutor insertExecutor = getInsertExecutor(
-            UITemplate.INDEX_NAME, uiTemplate, new UITemplate.Builder(), new HashMapConverter.ToStorage(), null);
+            model, uiTemplate, 0, new UITemplate.Builder(), new HashMapConverter.ToStorage(), null);
         try (Connection connection = h2Client.getConnection()) {
             insertExecutor.invoke(connection);
             return TemplateChangeStatus.builder().status(true).id(setting.getId()).build();
-        } catch (SQLException | JDBCClientException e) {
+        } catch (SQLException e) {
             log.error(e.getMessage(), e);
             return TemplateChangeStatus.builder()
                                        .status(false)
@@ -134,12 +152,13 @@ public class JDBCUITemplateManagementDAO extends JDBCSQLExecutor implements UITe
     }
 
     private TemplateChangeStatus executeUpdate(final UITemplate uiTemplate) throws IOException {
-        final SQLExecutor updateExecutor = getUpdateExecutor(
-            UITemplate.INDEX_NAME, uiTemplate, new UITemplate.Builder(), null);
+        final var model = TableMetaInfo.get(UITemplate.INDEX_NAME);
+        final var updateExecutor = getUpdateExecutor(
+            model, uiTemplate, 0, new UITemplate.Builder(), null);
         try (Connection connection = h2Client.getConnection()) {
             updateExecutor.invoke(connection);
             return TemplateChangeStatus.builder().status(true).id(uiTemplate.getTemplateId()).build();
-        } catch (SQLException | JDBCClientException e) {
+        } catch (SQLException e) {
             log.error(e.getMessage(), e);
             return TemplateChangeStatus.builder()
                                        .status(false)
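
The same file also carries the client API migration: JDBCHikariCPClient, where each DAO opened its own Connection, iterated the ResultSet, and translated SQLException into IOException, is replaced by the callback-style JDBCClient.executeQuery(sql, handler, params...). A condensed sketch of the getTemplate(...) call above, using the same names as the diff but abbreviated rather than the exact committed code:

    // The handler maps an open ResultSet to a value; the client owns the
    // Connection/ResultSet lifecycle, so the per-DAO try-with-resources and
    // SQLException-to-IOException plumbing disappears.
    final DashboardConfiguration result = h2Client.executeQuery(
        "select * from " + table + " where "
            + JDBCTableInstaller.TABLE_COLUMN + " = ? and id = ? LIMIT 1",
        resultSet -> {
            final var uiTemplate = (UITemplate) toStorageData(
                resultSet, UITemplate.INDEX_NAME, new UITemplate.Builder());
            return uiTemplate == null ? null : new DashboardConfiguration().fromEntity(uiTemplate);
        },
        UITemplate.INDEX_NAME, id);
    // A non-null value lets the caller break out of the per-table loop early,
    // as getTemplate(...) does above.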
diff --git a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCZipkinQueryDAO.java b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCZipkinQueryDAO.java
index 476b4d0f9f..ef6ece3eea 100644
--- a/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCZipkinQueryDAO.java
+++ b/oap-server/server-storage-plugin/storage-jdbc-hikaricp-plugin/src/main/java/org/apache/skywalking/oap/server/storage/plugin/jdbc/common/dao/JDBCZipkinQueryDAO.java
@@ -22,236 +22,287 @@ import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
 import lombok.RequiredArgsConstructor;
-import static org.apache.skywalking.oap.server.storage.plugin.jdbc.h2.H2TableInstaller.ID_COLUMN;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import lombok.SneakyThrows;
 import org.apache.skywalking.oap.server.core.query.input.Duration;
 import org.apache.skywalking.oap.server.core.storage.query.IZipkinQueryDAO;
 import org.apache.skywalking.oap.server.core.zipkin.ZipkinServiceRelationTraffic;
 import org.apache.skywalking.oap.server.core.zipkin.ZipkinServiceSpanTraffic;
 import org.apache.skywalking.oap.server.core.zipkin.ZipkinServiceTraffic;
 import org.apache.skywalking.oap.server.core.zipkin.ZipkinSpanRecord;
-import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCHikariCPClient;
+import org.apache.skywalking.oap.server.library.client.jdbc.hikaricp.JDBCClient;
 import org.apache.skywalking.oap.server.library.util.CollectionUtils;
 import org.apache.skywalking.oap.server.library.util.StringUtil;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.JDBCTableInstaller;
+import org.apache.skywalking.oap.server.storage.plugin.jdbc.common.TableHelper;
 import zipkin2.Endpoint;
 import zipkin2.Span;
 import zipkin2.storage.QueryRequest;
 
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static java.util.stream.Collectors.toList;
+
 @RequiredArgsConstructor
 public class JDBCZipkinQueryDAO implements IZipkinQueryDAO {
     private final static int NAME_QUERY_MAX_SIZE = Integer.MAX_VALUE;
     private static final Gson GSON = new Gson();
 
-    private final JDBCHikariCPClient h2Client;
+    private final JDBCClient h2Client;
+    private final TableHelper tableHelper;
 
     @Override
-    public List<String> getServiceNames() throws IOException {
-        StringBuilder sql = new StringBuilder();
-        sql.append("select ").append(ZipkinServiceTraffic.SERVICE_NAME).append(" from ").append(ZipkinServiceTraffic.INDEX_NAME);
-        sql.append(" where ").append("1=1");
-        sql.append(" limit ").append(NAME_QUERY_MAX_SIZE);
-        try (Connection connection = h2Client.getConnection()) {
-            ResultSet resultSet = h2Client.executeQuery(connection, sql.toString());
-            List<String> services = new ArrayList<>();
-            while (resultSet.next()) {
-                services.add(resultSet.getString(ZipkinServiceTraffic.SERVICE_NAME));
-            }
-            return services;
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @SneakyThrows
+    public List<String> getServiceNames() {
+        final var tables = tableHelper.getTablesWithinTTL(ZipkinServiceTraffic.INDEX_NAME);
+        final var services = new ArrayList<String>();
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            sql.append("select ").append(ZipkinServiceTraffic.SERVICE_NAME).append(" from ").append(table)
+                .append(" where ")
+               .append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            sql.append(" limit ").append(NAME_QUERY_MAX_SIZE);
+            h2Client.executeQuery(sql.toString(), resultSet -> {
+                while (resultSet.next()) {
+                    services.add(resultSet.getString(ZipkinServiceTraffic.SERVICE_NAME));
+                }
+                return null;
+            }, ZipkinServiceTraffic.INDEX_NAME);
         }
+
+        return services
+            .stream()
+            .limit(NAME_QUERY_MAX_SIZE)
+            .collect(toList());
     }
 
     @Override
-    public List<String> getRemoteServiceNames(final String serviceName) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(1);
-        sql.append("select ").append(ZipkinServiceRelationTraffic.REMOTE_SERVICE_NAME).append(" from ")
-           .append(ZipkinServiceRelationTraffic.INDEX_NAME);
-        sql.append(" where ");
-        sql.append(ZipkinServiceRelationTraffic.SERVICE_NAME).append(" = ?");
-        sql.append(" limit ").append(NAME_QUERY_MAX_SIZE);
-        condition.add(serviceName);
-        try (Connection connection = h2Client.getConnection()) {
-            ResultSet resultSet = h2Client.executeQuery(connection, sql.toString(), condition.toArray(new Object[0]));
-            List<String> remoteServices = new ArrayList<>();
-            while (resultSet.next()) {
-                remoteServices.add(resultSet.getString(ZipkinServiceRelationTraffic.REMOTE_SERVICE_NAME));
-            }
-            return remoteServices;
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @SneakyThrows
+    public List<String> getRemoteServiceNames(final String serviceName) {
+        final var tables = tableHelper.getTablesWithinTTL(ZipkinServiceRelationTraffic.INDEX_NAME);
+        final var remoteServices = new ArrayList<String>();
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(2);
+            sql.append("select ").append(ZipkinServiceRelationTraffic.REMOTE_SERVICE_NAME).append(" from ")
+               .append(table);
+            sql.append(" where ");
+            sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            condition.add(ZipkinServiceRelationTraffic.INDEX_NAME);
+            sql.append(" and ").append(ZipkinServiceRelationTraffic.SERVICE_NAME).append(" = ?");
+            sql.append(" limit ").append(NAME_QUERY_MAX_SIZE);
+            condition.add(serviceName);
+            h2Client.executeQuery(sql.toString(), resultSet -> {
+                while (resultSet.next()) {
+                    remoteServices.add(resultSet.getString(ZipkinServiceRelationTraffic.REMOTE_SERVICE_NAME));
+                }
+                return null;
+            }, condition.toArray(new Object[0]));
         }
+
+        return remoteServices
+            .stream()
+            .limit(NAME_QUERY_MAX_SIZE)
+            .collect(toList());
     }
 
     @Override
-    public List<String> getSpanNames(final String serviceName) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(1);
-        sql.append("select ").append(ZipkinServiceSpanTraffic.SPAN_NAME).append(" from ")
-           .append(ZipkinServiceSpanTraffic.INDEX_NAME);
-        sql.append(" where ");
-        sql.append(ZipkinServiceSpanTraffic.SERVICE_NAME).append(" = ?");
-        sql.append(" limit ").append(NAME_QUERY_MAX_SIZE);
-        condition.add(serviceName);
-        try (Connection connection = h2Client.getConnection()) {
-            ResultSet resultSet = h2Client.executeQuery(connection, sql.toString(), condition.toArray(new Object[0]));
-            List<String> spanNames = new ArrayList<>();
-            while (resultSet.next()) {
-                spanNames.add(resultSet.getString(ZipkinServiceSpanTraffic.SPAN_NAME));
-            }
-            return spanNames;
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @SneakyThrows
+    public List<String> getSpanNames(final String serviceName) {
+        final var tables = tableHelper.getTablesWithinTTL(ZipkinServiceSpanTraffic.INDEX_NAME);
+        final var spanNames = new ArrayList<String>();
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(2);
+            sql.append("select ").append(ZipkinServiceSpanTraffic.SPAN_NAME).append(" from ")
+               .append(table);
+            sql.append(" where ");
+            sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            condition.add(ZipkinServiceSpanTraffic.INDEX_NAME);
+            sql.append(" and ").append(ZipkinServiceSpanTraffic.SERVICE_NAME).append(" = ?");
+            sql.append(" limit ").append(NAME_QUERY_MAX_SIZE);
+            condition.add(serviceName);
+            h2Client.executeQuery(sql.toString(), resultSet -> {
+                while (resultSet.next()) {
+                    spanNames.add(resultSet.getString(ZipkinServiceSpanTraffic.SPAN_NAME));
+                }
+                return null;
+            }, condition.toArray(new Object[0]));
         }
+
+        return spanNames
+            .stream()
+            .limit(NAME_QUERY_MAX_SIZE)
+            .collect(toList());
     }
 
     @Override
-    public List<Span> getTrace(final String traceId) throws IOException {
-        StringBuilder sql = new StringBuilder();
-        List<Object> condition = new ArrayList<>(1);
-        sql.append("select * from ").append(ZipkinSpanRecord.INDEX_NAME);
-        sql.append(" where ");
-        sql.append(ZipkinSpanRecord.TRACE_ID).append(" = ?");
-        condition.add(traceId);
-        try (Connection connection = h2Client.getConnection()) {
-            ResultSet resultSet = h2Client.executeQuery(connection, sql.toString(), condition.toArray(new Object[0]));
-            List<Span> trace = new ArrayList<>();
-            while (resultSet.next()) {
-                trace.add(buildSpan(resultSet));
-            }
-            return trace;
-        } catch (SQLException e) {
-            throw new IOException(e);
+    @SneakyThrows
+    public List<Span> getTrace(final String traceId) {
+        final var tables = tableHelper.getTablesWithinTTL(ZipkinSpanRecord.INDEX_NAME);
+        final var trace = new ArrayList<Span>();
+
+        for (String table : tables) {
+            StringBuilder sql = new StringBuilder();
+            List<Object> condition = new ArrayList<>(2);
+            sql.append("select * from ").append(table);
+            sql.append(" where ");
+            sql.append(JDBCTableInstaller.TABLE_COLUMN).append(" = ?");
+            condition.add(ZipkinSpanRecord.INDEX_NAME);
+            sql.append(" and ").append(ZipkinSpanRecord.TRACE_ID).append(" = ?");
+            condition.add(traceId);
+            h2Client.executeQuery(sql.toString(), resultSet -> {
+                while (resultSet.next()) {
+                    trace.add(buildSpan(resultSet));
+                }
+                return null;
+            }, condition.toArray(new Object[0]));
         }
+        return trace;
     }
 
     @Override
-    public List<List<Span>> getTraces(final QueryRequest request, Duration duration) throws IOException {
... 6217 lines suppressed ...
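
One subtlety in the Zipkin DAO rewrites above: a "limit N" that used to apply to a single table must now hold across several day-based tables, so each per-table query keeps the limit and the merged list is capped again before returning. A reduced sketch of that double capping, reusing names from the diff with the per-table query body elided:

    // Bound each table scan, then bound the union, so the "at most N names"
    // contract survives the day-based table rolling.
    final var services = new ArrayList<String>();
    for (final String table : tableHelper.getTablesWithinTTL(ZipkinServiceTraffic.INDEX_NAME)) {
        // ... per-table "select ... limit NAME_QUERY_MAX_SIZE" appends into services (see diff above) ...
    }
    return services.stream().limit(NAME_QUERY_MAX_SIZE).collect(toList());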