You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@doris.apache.org by kx...@apache.org on 2023/06/06 15:15:22 UTC

[doris] 20/36: [fix][refactor](backend-policy)(compute) refactor the hierarchy of external scan node and fix compute node bug #20402

This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch branch-2.0-beta
in repository https://gitbox.apache.org/repos/asf/doris.git

commit b6a1092d87ef317c295a8c5488db5cf1ad7d25b8
Author: Mingyu Chen <mo...@163.com>
AuthorDate: Tue Jun 6 10:35:30 2023 +0800

    [fix][refactor](backend-policy)(compute) refactor the hierarchy of external scan node and fix compute node bug #20402
    
    There should be 2 kinds of ScanNode:
    
    OlapScanNode
    ExternalScanNode
    The Backends used for ExternalScanNode should be controlled by FederationBackendPolicy.
    But currently, only FileScanNode is controlled by FederationBackendPolicy; other scan nodes such as MysqlScanNode and
    JdbcScanNode will use Mix Backends even if we enable and prefer to use Compute Backends.
    
    In this PR, I modified the hierarchy of ExternalScanNode, the new hierarchy is:
    
    ScanNode
        OlapScanNode
        SchemaScanNode
        ExternalScanNode
            MetadataScanNode
            DataGenScanNode
            EsScanNode
            OdbcScanNode
            MysqlScanNode
            JdbcScanNode
            FileScanNode
                FileLoadScanNode
                FileQueryScanNode
                    MaxComputeScanNode
                    IcebergScanNode
                    TVFScanNode
                    HiveScanNode
                        HudiScanNode
    Previously, the BackendPolicy was a member of FileScanNode; now I have moved it to ExternalScanNode,
    so that all subtypes of ExternalScanNode can use BackendPolicy to choose a Compute Backend to execute the query.
    
    All ExternalScanNode subclasses should implement the abstract method createScanRangeLocations().
    
    For scan nodes like the JDBC scan node or MySQL scan node, the scan range locations will be selected randomly from
    compute nodes (if preferred).
    
    As for compute node selection: if all scan nodes are external scan nodes and prefer_compute_node_for_external_table
    is set to true, only compute nodes will be selected as BEs for this query.
---
 .../planner/BackendPartitionedSchemaScanNode.java  | 18 +++-----
 .../org/apache/doris/planner/DataGenScanNode.java  | 37 +++++----------
 .../java/org/apache/doris/planner/EsScanNode.java  | 48 ++++++--------------
 .../org/apache/doris/planner/FileLoadScanNode.java | 29 ++++++++++--
 .../org/apache/doris/planner/JdbcScanNode.java     | 49 ++++++++++++--------
 .../org/apache/doris/planner/MysqlScanNode.java    | 27 ++++-------
 .../org/apache/doris/planner/OdbcScanNode.java     | 32 ++++---------
 .../org/apache/doris/planner/OlapScanNode.java     | 52 ++++++++--------------
 .../java/org/apache/doris/planner/ScanNode.java    | 25 ++++++++++-
 .../org/apache/doris/planner/SchemaScanNode.java   | 15 ++++---
 .../doris/planner/TestExternalTableScanNode.java   | 31 +++++--------
 .../doris/planner/external/ExternalScanNode.java   | 30 +++++++++++--
 .../planner/external/FederationBackendPolicy.java  |  4 ++
 .../doris/planner/external/FileQueryScanNode.java  | 18 ++++----
 .../doris/planner/external/FileScanNode.java       | 12 +----
 .../doris/planner/external/MetadataScanNode.java   | 32 ++++++++-----
 .../apache/doris/planner/external/TVFScanNode.java |  9 ----
 .../doris/planner/external/hudi/HudiScanNode.java  |  2 +-
 .../planner/external/iceberg/IcebergScanNode.java  | 20 ++++-----
 .../main/java/org/apache/doris/qe/Coordinator.java | 48 +++++++++++---------
 .../java/org/apache/doris/qe/CoordinatorTest.java  |  8 ++--
 regression-test/data/es_p0/test_es_query.out       | 46 +++++++++----------
 .../data/es_p0/test_es_query_no_http_url.out       | 10 ++---
 regression-test/suites/es_p0/test_es_query.groovy  |  4 --
 .../suites/es_p0/test_es_query_nereids.groovy      |  5 ---
 .../suites/es_p0/test_es_query_no_http_url.groovy  |  4 --
 .../hive/test_external_yandex_nereids.groovy       |  1 +
 27 files changed, 295 insertions(+), 321 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/BackendPartitionedSchemaScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/BackendPartitionedSchemaScanNode.java
index 8889373af3..6b2e278a01 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/BackendPartitionedSchemaScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/BackendPartitionedSchemaScanNode.java
@@ -60,7 +60,6 @@ public class BackendPartitionedSchemaScanNode extends SchemaScanNode {
         return false;
     }
 
-    private List<TScanRangeLocations> shardScanRanges;
     // backendPartitionInfo is set in generatePartitionInfo().
     // `backendPartitionInfo` is `List Partition` of Backend_ID, one PartitionItem only have one partitionKey
     // for example: if the alive be are: 10001, 10002, 10003, `backendPartitionInfo` like
@@ -86,24 +85,20 @@ public class BackendPartitionedSchemaScanNode extends SchemaScanNode {
     @Override
     public void finalize(Analyzer analyzer) throws UserException {
         super.finalize(analyzer);
-        shardScanRanges = getScanRangeLocations();
+        createScanRangeLocations();
     }
 
     @Override
     public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return shardScanRanges;
+        return scanRangeLocations;
     }
 
     @Override
-    public int getNumInstances() {
-        return shardScanRanges.size();
-    }
-
-    private List<TScanRangeLocations> getScanRangeLocations() throws AnalysisException {
-        List<TScanRangeLocations> result = new ArrayList<>();
+    protected void createScanRangeLocations() throws UserException {
+        scanRangeLocations = new ArrayList<>();
         for (Long partitionID : selectedPartitionIds) {
             Long backendId = partitionIDToBackendID.get(partitionID);
-            Backend be  = Env.getCurrentSystemInfo().getIdToBackend().get(backendId);
+            Backend be = Env.getCurrentSystemInfo().getIdToBackend().get(backendId);
             if (!be.isAlive()) {
                 throw new AnalysisException("backend " + be.getId() + " is not alive.");
             }
@@ -113,9 +108,8 @@ public class BackendPartitionedSchemaScanNode extends SchemaScanNode {
             location.setServer(new TNetworkAddress(be.getHost(), be.getBePort()));
             locations.addToLocations(location);
             locations.setScanRange(new TScanRange());
-            result.add(locations);
+            scanRangeLocations.add(locations);
         }
-        return result;
     }
 
     private void computePartitionInfo() throws AnalysisException {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/DataGenScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/DataGenScanNode.java
index da4dfa1906..46af5ec1ae 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/DataGenScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/DataGenScanNode.java
@@ -19,9 +19,9 @@ package org.apache.doris.planner;
 
 import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
-import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.NereidsException;
 import org.apache.doris.common.UserException;
+import org.apache.doris.planner.external.ExternalScanNode;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.tablefunction.DataGenTableValuedFunction;
 import org.apache.doris.tablefunction.TableValuedFunctionTask;
@@ -41,32 +41,25 @@ import java.util.List;
 /**
  * This scan node is used for data source generated from memory.
  */
-public class DataGenScanNode extends ScanNode {
+public class DataGenScanNode extends ExternalScanNode {
     private static final Logger LOG = LogManager.getLogger(DataGenScanNode.class.getName());
 
-    private List<TScanRangeLocations> shardScanRanges;
     private DataGenTableValuedFunction tvf;
     private boolean isFinalized = false;
 
     public DataGenScanNode(PlanNodeId id, TupleDescriptor desc, DataGenTableValuedFunction tvf) {
-        super(id, desc, "DataGenScanNode", StatisticalType.TABLE_VALUED_FUNCTION_NODE);
+        super(id, desc, "DataGenScanNode", StatisticalType.TABLE_VALUED_FUNCTION_NODE, false);
         this.tvf = tvf;
     }
 
     @Override
     public void init(Analyzer analyzer) throws UserException {
         super.init(analyzer);
-        computeStats(analyzer);
-    }
-
-    @Override
-    public int getNumInstances() {
-        return shardScanRanges.size();
     }
 
     @Override
     public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return shardScanRanges;
+        return scanRangeLocations;
     }
 
     @Override
@@ -74,12 +67,7 @@ public class DataGenScanNode extends ScanNode {
         if (isFinalized) {
             return;
         }
-        try {
-            shardScanRanges = getShardLocations();
-        } catch (AnalysisException e) {
-            throw new UserException(e.getMessage());
-        }
-
+        createScanRangeLocations();
         isFinalized = true;
     }
 
@@ -92,8 +80,9 @@ public class DataGenScanNode extends ScanNode {
         msg.data_gen_scan_node = dataGenScanNode;
     }
 
-    private List<TScanRangeLocations> getShardLocations() throws AnalysisException {
-        List<TScanRangeLocations> result = Lists.newArrayList();
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        scanRangeLocations = Lists.newArrayList();
         for (TableValuedFunctionTask task : tvf.getTasks()) {
             TScanRangeLocations locations = new TScanRangeLocations();
             TScanRangeLocation location = new TScanRangeLocation();
@@ -101,19 +90,15 @@ public class DataGenScanNode extends ScanNode {
             location.setServer(new TNetworkAddress(task.getBackend().getHost(), task.getBackend().getBePort()));
             locations.addToLocations(location);
             locations.setScanRange(task.getExecParams());
-            result.add(locations);
+            scanRangeLocations.add(locations);
         }
-        return result;
     }
 
     @Override
     public void finalizeForNereids() {
-        if (shardScanRanges != null) {
-            return;
-        }
         try {
-            shardScanRanges = getShardLocations();
-        } catch (AnalysisException e) {
+            createScanRangeLocations();
+        } catch (UserException e) {
             throw new NereidsException("Can not compute shard locations for DataGenScanNode: " + e.getMessage(), e);
         }
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/EsScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/EsScanNode.java
index e2ddb34509..1e80ce17e8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/EsScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/EsScanNode.java
@@ -37,6 +37,7 @@ import org.apache.doris.external.elasticsearch.QueryBuilders;
 import org.apache.doris.external.elasticsearch.QueryBuilders.BoolQueryBuilder;
 import org.apache.doris.external.elasticsearch.QueryBuilders.BuilderOptions;
 import org.apache.doris.external.elasticsearch.QueryBuilders.QueryBuilder;
+import org.apache.doris.planner.external.ExternalScanNode;
 import org.apache.doris.planner.external.FederationBackendPolicy;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.statistics.query.StatsDelta;
@@ -53,7 +54,6 @@ import org.apache.doris.thrift.TScanRangeLocations;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.collect.Multimap;
 import lombok.SneakyThrows;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -63,20 +63,16 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 import java.util.Set;
 
 /**
  * ScanNode for Elasticsearch.
  **/
-public class EsScanNode extends ScanNode {
+public class EsScanNode extends ExternalScanNode {
 
     private static final Logger LOG = LogManager.getLogger(EsScanNode.class);
 
-    private final Random random = new Random(System.currentTimeMillis());
-    private Multimap<String, Backend> backendMap;
     private EsTablePartitions esTablePartitions;
-    private List<TScanRangeLocations> shardScanRanges = Lists.newArrayList();
     private EsTable table;
     private QueryBuilder queryBuilder;
     private boolean isFinalized = false;
@@ -89,7 +85,7 @@ public class EsScanNode extends ScanNode {
      * For multicatalog es.
      **/
     public EsScanNode(PlanNodeId id, TupleDescriptor desc, String planNodeName, boolean esExternalTable) {
-        super(id, desc, planNodeName, StatisticalType.ES_SCAN_NODE);
+        super(id, desc, planNodeName, StatisticalType.ES_SCAN_NODE, false);
         if (esExternalTable) {
             EsExternalTable externalTable = (EsExternalTable) (desc.getTable());
             table = externalTable.getEsTable();
@@ -102,54 +98,36 @@ public class EsScanNode extends ScanNode {
     @Override
     public void init(Analyzer analyzer) throws UserException {
         super.init(analyzer);
-        computeColumnFilter();
-        computeStats(analyzer);
         buildQuery();
     }
 
+    @Override
     public void init() throws UserException {
-        computeColumnFilter();
+        super.init();
         buildQuery();
     }
 
     @Override
-    public int getNumInstances() {
-        return shardScanRanges.size();
+    public void finalize(Analyzer analyzer) throws UserException {
+        doFinalize();
     }
 
     @Override
-    public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return shardScanRanges;
+    public void finalizeForNereids() throws UserException {
+        doFinalize();
     }
 
-    @Override
-    public void finalize(Analyzer analyzer) throws UserException {
+    private void doFinalize() throws UserException {
         if (isFinalized) {
             return;
         }
-
-        try {
-            shardScanRanges = getShardLocations();
-        } catch (AnalysisException e) {
-            throw new UserException(e.getMessage());
-        }
-
+        createScanRangeLocations();
         isFinalized = true;
     }
 
     @Override
-    public void finalizeForNereids() throws UserException {
-        if (isFinalized) {
-            return;
-        }
-
-        try {
-            shardScanRanges = getShardLocations();
-        } catch (AnalysisException e) {
-            throw new UserException(e.getMessage());
-        }
-
-        isFinalized = true;
+    protected void createScanRangeLocations() throws UserException {
+        scanRangeLocations = getShardLocations();
     }
 
     /**
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/FileLoadScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/FileLoadScanNode.java
index ef581280c0..c20a916bf9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/FileLoadScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/FileLoadScanNode.java
@@ -40,11 +40,13 @@ import org.apache.doris.catalog.TableIf;
 import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.UserException;
 import org.apache.doris.load.BrokerFileGroup;
+import org.apache.doris.planner.external.FederationBackendPolicy;
 import org.apache.doris.planner.external.FileGroupInfo;
 import org.apache.doris.planner.external.FileScanNode;
 import org.apache.doris.planner.external.LoadScanProvider;
 import org.apache.doris.rewrite.ExprRewriter;
 import org.apache.doris.statistics.StatisticalType;
+import org.apache.doris.system.BeSelectionPolicy;
 import org.apache.doris.thrift.TBrokerFileStatus;
 import org.apache.doris.thrift.TFileScanRangeParams;
 import org.apache.doris.thrift.TFileType;
@@ -120,8 +122,6 @@ public class FileLoadScanNode extends FileScanNode {
         for (FileGroupInfo fileGroupInfo : fileGroupInfos) {
             this.scanProviders.add(new LoadScanProvider(fileGroupInfo, desc));
         }
-        backendPolicy.init();
-        numNodes = backendPolicy.numBackends();
         initParamCreateContexts(analyzer);
     }
 
@@ -193,18 +193,39 @@ public class FileLoadScanNode extends FileScanNode {
     public void finalize(Analyzer analyzer) throws UserException {
         Preconditions.checkState(contexts.size() == scanProviders.size(),
                 contexts.size() + " vs. " + scanProviders.size());
+        // ATTN: for load scan node, do not use backend policy in ExternalScanNode.
+        // Because backend policy in ExternalScanNode may only contain compute backend.
+        // But for load job, we should select backends from all backends, both compute and mix.
+        BeSelectionPolicy policy = new BeSelectionPolicy.Builder()
+                .needQueryAvailable()
+                .needLoadAvailable()
+                .build();
+        FederationBackendPolicy localBackendPolicy = new FederationBackendPolicy();
+        localBackendPolicy.init(policy);
         for (int i = 0; i < contexts.size(); ++i) {
             FileLoadScanNode.ParamCreateContext context = contexts.get(i);
             LoadScanProvider scanProvider = scanProviders.get(i);
             finalizeParamsForLoad(context, analyzer);
-            createScanRangeLocations(context, scanProvider);
+            createScanRangeLocations(context, scanProvider, localBackendPolicy);
             this.inputSplitsNum += scanProvider.getInputSplitNum();
             this.totalFileSize += scanProvider.getInputFileSize();
         }
     }
 
+    // TODO: This api is for load job only. Will remove it later.
+    private void createScanRangeLocations(FileLoadScanNode.ParamCreateContext context,
+            LoadScanProvider scanProvider, FederationBackendPolicy backendPolicy)
+            throws UserException {
+        scanProvider.createScanRangeLocations(context, backendPolicy, scanRangeLocations);
+    }
+
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        // do nothing, we have already created scan range locations in finalize
+    }
+
     protected void finalizeParamsForLoad(ParamCreateContext context,
-                                         Analyzer analyzer) throws UserException {
+            Analyzer analyzer) throws UserException {
         Map<String, SlotDescriptor> slotDescByName = context.srcSlotDescByName;
         Map<String, Expr> exprMap = context.exprMap;
         TupleDescriptor srcTupleDesc = context.srcTupleDescriptor;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/JdbcScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/JdbcScanNode.java
index c839d5ea95..34bc96255a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/JdbcScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/JdbcScanNode.java
@@ -22,6 +22,7 @@ import org.apache.doris.analysis.Expr;
 import org.apache.doris.analysis.ExprSubstitutionMap;
 import org.apache.doris.analysis.FunctionCallExpr;
 import org.apache.doris.analysis.SlotDescriptor;
+import org.apache.doris.analysis.SlotId;
 import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.catalog.Column;
@@ -31,6 +32,8 @@ import org.apache.doris.catalog.OdbcTable;
 import org.apache.doris.catalog.external.JdbcExternalTable;
 import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.UserException;
+import org.apache.doris.nereids.glue.translator.PlanTranslatorContext;
+import org.apache.doris.planner.external.ExternalScanNode;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.statistics.query.StatsDelta;
@@ -39,7 +42,6 @@ import org.apache.doris.thrift.TJdbcScanNode;
 import org.apache.doris.thrift.TOdbcTableType;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
-import org.apache.doris.thrift.TScanRangeLocations;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.MoreObjects;
@@ -49,8 +51,9 @@ import org.apache.logging.log4j.Logger;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Set;
 
-public class JdbcScanNode extends ScanNode {
+public class JdbcScanNode extends ExternalScanNode {
     private static final Logger LOG = LogManager.getLogger(JdbcScanNode.class);
 
     private final List<String> columns = new ArrayList<String>();
@@ -62,7 +65,7 @@ public class JdbcScanNode extends ScanNode {
     private JdbcTable tbl;
 
     public JdbcScanNode(PlanNodeId id, TupleDescriptor desc, boolean isJdbcExternalTable) {
-        super(id, desc, "JdbcScanNode", StatisticalType.JDBC_SCAN_NODE);
+        super(id, desc, "JdbcScanNode", StatisticalType.JDBC_SCAN_NODE, false);
         if (isJdbcExternalTable) {
             JdbcExternalTable jdbcExternalTable = (JdbcExternalTable) (desc.getTable());
             tbl = jdbcExternalTable.getJdbcTable();
@@ -76,10 +79,20 @@ public class JdbcScanNode extends ScanNode {
     @Override
     public void init(Analyzer analyzer) throws UserException {
         super.init(analyzer);
-        computeStats(analyzer);
         getGraphQueryString();
     }
 
+    /**
+     * Used for Nereids. Should NOT use this function in anywhere else.
+     */
+    @Override
+    public void init() throws UserException {
+        super.init();
+        numNodes = numNodes <= 0 ? 1 : numNodes;
+        StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
+        cardinality = (long) statsDeriveResult.getRowCount();
+    }
+
     private boolean isNebula() {
         return jdbcType == TOdbcTableType.NEBULA;
     }
@@ -99,20 +112,6 @@ public class JdbcScanNode extends ScanNode {
         conjuncts = Lists.newArrayList();
     }
 
-    /**
-     * Used for Nereids. Should NOT use this function in anywhere else.
-     */
-    public void init() throws UserException {
-        numNodes = numNodes <= 0 ? 1 : numNodes;
-        StatsRecursiveDerive.getStatsRecursiveDerive().statsRecursiveDerive(this);
-        cardinality = (long) statsDeriveResult.getRowCount();
-    }
-
-    @Override
-    public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return null;
-    }
-
     private void createJdbcFilters() {
         if (conjuncts.isEmpty()) {
             return;
@@ -139,6 +138,7 @@ public class JdbcScanNode extends ScanNode {
     }
 
     private void createJdbcColumns() {
+        columns.clear();
         for (SlotDescriptor slot : desc.getSlots()) {
             if (!slot.isMaterialized()) {
                 continue;
@@ -212,12 +212,25 @@ public class JdbcScanNode extends ScanNode {
         // Convert predicates to Jdbc columns and filters.
         createJdbcColumns();
         createJdbcFilters();
+        createScanRangeLocations();
     }
 
     @Override
     public void finalizeForNereids() throws UserException {
         createJdbcColumns();
         createJdbcFilters();
+        createScanRangeLocations();
+    }
+
+    @Override
+    public void updateRequiredSlots(PlanTranslatorContext context, Set<SlotId> requiredByProjectSlotIdSet)
+            throws UserException {
+        createJdbcColumns();
+    }
+
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        scanRangeLocations = Lists.newArrayList(createSingleScanRangeLocations(backendPolicy));
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/MysqlScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/MysqlScanNode.java
index a04b531159..506e95c4b8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/MysqlScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/MysqlScanNode.java
@@ -26,13 +26,13 @@ import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.MysqlTable;
 import org.apache.doris.common.UserException;
+import org.apache.doris.planner.external.ExternalScanNode;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TMySQLScanNode;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
-import org.apache.doris.thrift.TScanRangeLocations;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.MoreObjects;
@@ -46,7 +46,7 @@ import java.util.List;
 /**
  * Full scan of an MySQL table.
  */
-public class MysqlScanNode extends ScanNode {
+public class MysqlScanNode extends ExternalScanNode {
     private static final Logger LOG = LogManager.getLogger(MysqlScanNode.class);
 
     private final List<String> columns = new ArrayList<String>();
@@ -57,16 +57,10 @@ public class MysqlScanNode extends ScanNode {
      * Constructs node to scan given data files of table 'tbl'.
      */
     public MysqlScanNode(PlanNodeId id, TupleDescriptor desc, MysqlTable tbl) {
-        super(id, desc, "SCAN MYSQL", StatisticalType.MYSQL_SCAN_NODE);
+        super(id, desc, "SCAN MYSQL", StatisticalType.MYSQL_SCAN_NODE, false);
         tblName = "`" + tbl.getMysqlTableName() + "`";
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        computeStats(analyzer);
-    }
-
     @Override
     protected String debugString() {
         MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);
@@ -78,6 +72,12 @@ public class MysqlScanNode extends ScanNode {
         // Convert predicates to MySQL columns and filters.
         createMySQLColumns(analyzer);
         createMySQLFilters(analyzer);
+        createScanRangeLocations();
+    }
+
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        scanRangeLocations = Lists.newArrayList(createSingleScanRangeLocations(backendPolicy));
     }
 
     @Override
@@ -150,15 +150,6 @@ public class MysqlScanNode extends ScanNode {
         msg.mysql_scan_node = new TMySQLScanNode(desc.getId().asInt(), tblName, columns, filters);
     }
 
-    /**
-     * We query MySQL Meta to get request's data location
-     * extra result info will pass to backend ScanNode
-     */
-    @Override
-    public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return null;
-    }
-
     @Override
     public int getNumInstances() {
         return 1;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/OdbcScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/OdbcScanNode.java
index 32f8fe2f65..e21af36af1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/OdbcScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/OdbcScanNode.java
@@ -33,6 +33,7 @@ import org.apache.doris.catalog.Type;
 import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.UserException;
+import org.apache.doris.planner.external.ExternalScanNode;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.statistics.query.StatsDelta;
@@ -41,7 +42,6 @@ import org.apache.doris.thrift.TOdbcScanNode;
 import org.apache.doris.thrift.TOdbcTableType;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
-import org.apache.doris.thrift.TScanRangeLocations;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.MoreObjects;
@@ -55,7 +55,7 @@ import java.util.List;
 /**
  * Full scan of an ODBC table.
  */
-public class OdbcScanNode extends ScanNode {
+public class OdbcScanNode extends ExternalScanNode {
     private static final Logger LOG = LogManager.getLogger(OdbcScanNode.class);
 
     // Now some database have different function call like doris, now doris do not
@@ -100,19 +100,13 @@ public class OdbcScanNode extends ScanNode {
      * Constructs node to scan given data files of table 'tbl'.
      */
     public OdbcScanNode(PlanNodeId id, TupleDescriptor desc, OdbcTable tbl) {
-        super(id, desc, "SCAN ODBC", StatisticalType.ODBC_SCAN_NODE);
+        super(id, desc, "SCAN ODBC", StatisticalType.ODBC_SCAN_NODE, false);
         connectString = tbl.getConnectString();
         odbcType = tbl.getOdbcTableType();
         tblName = OdbcTable.databaseProperName(odbcType, tbl.getOdbcTableName());
         this.tbl = tbl;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        computeStats(analyzer);
-    }
-
     @Override
     protected String debugString() {
         MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);
@@ -124,6 +118,12 @@ public class OdbcScanNode extends ScanNode {
         // Convert predicates to Odbc columns and filters.
         createOdbcColumns(analyzer);
         createOdbcFilters(analyzer);
+        createScanRangeLocations();
+    }
+
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        scanRangeLocations = Lists.newArrayList(createSingleScanRangeLocations(backendPolicy));
     }
 
     @Override
@@ -229,20 +229,6 @@ public class OdbcScanNode extends ScanNode {
         msg.odbc_scan_node = odbcScanNode;
     }
 
-    /**
-     * We query Odbc Meta to get request's data location
-     * extra result info will pass to backend ScanNode
-     */
-    @Override
-    public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return null;
-    }
-
-    @Override
-    public int getNumInstances() {
-        return 1;
-    }
-
     @Override
     public void computeStats(Analyzer analyzer) throws UserException {
         super.computeStats(analyzer);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
index 908b428aa5..80bac9fb55 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/OlapScanNode.java
@@ -111,7 +111,6 @@ public class OlapScanNode extends ScanNode {
     // average compression ratio in doris storage engine
     private static final int COMPRESSION_RATIO = 5;
 
-    private List<TScanRangeLocations> result = new ArrayList<>();
     /*
      * When the field value is ON, the storage engine can return the data directly
      * without pre-aggregation.
@@ -569,7 +568,7 @@ public class OlapScanNode extends ScanNode {
     public void init() throws UserException {
         selectedPartitionNum = selectedPartitionIds.size();
         try {
-            getScanRangeLocations();
+            createScanRangeLocations();
         } catch (AnalysisException e) {
             throw new UserException(e.getMessage());
         }
@@ -603,7 +602,7 @@ public class OlapScanNode extends ScanNode {
         // prepare stmt evaluate lazily in Coordinator execute
         if (!isFromPrepareStmt) {
             try {
-                getScanRangeLocations();
+                createScanRangeLocations();
             } catch (AnalysisException e) {
                 throw new UserException(e.getMessage());
             }
@@ -730,7 +729,7 @@ public class OlapScanNode extends ScanNode {
                     visibleVersionStr = String.valueOf(visibleVersion);
                 }
             }
-            TScanRangeLocations scanRangeLocations = new TScanRangeLocations();
+            TScanRangeLocations locations = new TScanRangeLocations();
             TPaloScanRange paloRange = new TPaloScanRange();
             paloRange.setDbName("");
             paloRange.setSchemaHash("0");
@@ -796,7 +795,7 @@ public class OlapScanNode extends ScanNode {
                 int port = backend.getBePort();
                 TScanRangeLocation scanRangeLocation = new TScanRangeLocation(new TNetworkAddress(ip, port));
                 scanRangeLocation.setBackendId(replica.getBackendId());
-                scanRangeLocations.addToLocations(scanRangeLocation);
+                locations.addToLocations(scanRangeLocation);
                 paloRange.addToHosts(new TNetworkAddress(ip, port));
                 tabletIsNull = false;
 
@@ -817,11 +816,11 @@ public class OlapScanNode extends ScanNode {
             }
             TScanRange scanRange = new TScanRange();
             scanRange.setPaloScanRange(paloRange);
-            scanRangeLocations.setScanRange(scanRange);
+            locations.setScanRange(scanRange);
 
-            bucketSeq2locations.put(tabletId2BucketSeq.get(tabletId), scanRangeLocations);
+            bucketSeq2locations.put(tabletId2BucketSeq.get(tabletId), locations);
 
-            result.add(scanRangeLocations);
+            scanRangeLocations.add(locations);
         }
 
         if (tablets.size() == 0) {
@@ -884,11 +883,13 @@ public class OlapScanNode extends ScanNode {
         final RollupSelector rollupSelector = new RollupSelector(analyzer, desc, olapTable);
         selectedIndexId = rollupSelector.selectBestRollup(selectedPartitionIds, conjuncts, isPreAggregation);
         updateSlotUniqueId();
-        LOG.debug("select best roll up cost: {} ms, best index id: {}",
-                (System.currentTimeMillis() - start), selectedIndexId);
+        LOG.debug("select best roll up cost: {} ms, best index id: {}", (System.currentTimeMillis() - start),
+                selectedIndexId);
     }
 
-    private void getScanRangeLocations() throws UserException {
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        scanRangeLocations = Lists.newArrayList();
         if (selectedPartitionIds.size() == 0) {
             desc.setCardinality(0);
             return;
@@ -1088,7 +1089,7 @@ public class OlapScanNode extends ScanNode {
      */
     @Override
     public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return result;
+        return scanRangeLocations;
     }
 
     // Only called when Coordinator exec in high performance point query
@@ -1100,14 +1101,13 @@ public class OlapScanNode extends ScanNode {
         computePartitionInfo();
         scanBackendIds.clear();
         scanTabletIds.clear();
-        result.clear();
         bucketSeq2locations.clear();
         try {
-            getScanRangeLocations();
+            createScanRangeLocations();
         } catch (AnalysisException e) {
             throw new UserException(e.getMessage());
         }
-        return result;
+        return scanRangeLocations;
     }
 
     public void setDescTable(DescriptorTable descTable) {
@@ -1207,11 +1207,11 @@ public class OlapScanNode extends ScanNode {
         // so here we need count distinct be_num to do the work. make sure get right instance
         if (ConnectContext.get().getSessionVariable().enablePipelineEngine()) {
             int parallelInstance = ConnectContext.get().getSessionVariable().getParallelExecInstanceNum();
-            long numBackend = result.stream().flatMap(rangeLoc -> rangeLoc.getLocations().stream())
-                        .map(loc -> loc.backend_id).distinct().count();
+            long numBackend = scanRangeLocations.stream().flatMap(rangeLoc -> rangeLoc.getLocations().stream())
+                    .map(loc -> loc.backend_id).distinct().count();
             return (int) (parallelInstance * numBackend);
         }
-        return result.size();
+        return scanRangeLocations.size();
     }
 
     @Override
@@ -1353,22 +1353,6 @@ public class OlapScanNode extends ScanNode {
         }
     }
 
-    // export some tablets
-    public static OlapScanNode createOlapScanNodeByLocation(
-            PlanNodeId id, TupleDescriptor desc, String planNodeName, List<TScanRangeLocations> locationsList) {
-        OlapScanNode olapScanNode = new OlapScanNode(id, desc, planNodeName);
-        olapScanNode.numInstances = 1;
-
-        olapScanNode.selectedIndexId = olapScanNode.olapTable.getBaseIndexId();
-        olapScanNode.selectedPartitionNum = 1;
-        olapScanNode.selectedTabletsNum = 1;
-        olapScanNode.totalTabletsNum = 1;
-        olapScanNode.setIsPreAggregation(false, "Export job");
-        olapScanNode.result.addAll(locationsList);
-
-        return olapScanNode;
-    }
-
     public void collectColumns(Analyzer analyzer, Set<String> equivalenceColumns, Set<String> unequivalenceColumns) {
         // 1. Get columns which has predicate on it.
         for (Expr expr : conjuncts) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
index e63662b053..7dc0a7b3de 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
@@ -41,10 +41,14 @@ import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.NotImplementedException;
 import org.apache.doris.common.UserException;
 import org.apache.doris.nereids.glue.translator.PlanTranslatorContext;
+import org.apache.doris.planner.external.FederationBackendPolicy;
 import org.apache.doris.spi.Split;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.statistics.query.StatsDelta;
+import org.apache.doris.system.Backend;
 import org.apache.doris.thrift.TNetworkAddress;
+import org.apache.doris.thrift.TScanRange;
+import org.apache.doris.thrift.TScanRangeLocation;
 import org.apache.doris.thrift.TScanRangeLocations;
 
 import com.google.common.base.MoreObjects;
@@ -74,6 +78,7 @@ public abstract class ScanNode extends PlanNode {
     protected Map<String, ColumnRange> columnNameToRange = Maps.newHashMap();
     protected String sortColumn = null;
     protected Analyzer analyzer;
+    protected List<TScanRangeLocations> scanRangeLocations = Lists.newArrayList();
 
     public ScanNode(PlanNodeId id, TupleDescriptor desc, String planNodeName, StatisticalType statisticalType) {
         super(id, desc.getId().asList(), planNodeName, statisticalType);
@@ -128,6 +133,8 @@ public abstract class ScanNode extends PlanNode {
         }
     }
 
+    protected abstract void createScanRangeLocations() throws UserException;
+
     /**
      * Returns all scan ranges plus their locations. Needs to be preceded by a call to
      * finalize().
@@ -170,7 +177,10 @@ public abstract class ScanNode extends PlanNode {
     }
 
     public void computeColumnFilter() {
-        computeColumnFilter(desc.getTable().getBaseSchema());
+        // for load scan node, table is null
+        if (desc.getTable() != null) {
+            computeColumnFilter(desc.getTable().getBaseSchema());
+        }
     }
 
     public static ColumnRange createColumnRange(SlotDescriptor desc,
@@ -593,4 +603,17 @@ public abstract class ScanNode extends PlanNode {
         }
         return delta;
     }
+
+    // Create a single TScanRangeLocations for the given backend policy.
+    // Used by those scan nodes which do not require a specific data location.
+    public static TScanRangeLocations createSingleScanRangeLocations(FederationBackendPolicy backendPolicy) {
+        TScanRangeLocations scanRangeLocation = new TScanRangeLocations();
+        scanRangeLocation.setScanRange(new TScanRange());
+        TScanRangeLocation location = new TScanRangeLocation();
+        Backend be = backendPolicy.getNextBe();
+        location.setServer(new TNetworkAddress(be.getHost(), be.getBePort()));
+        location.setBackendId(be.getId());
+        scanRangeLocation.addToLocations(location);
+        return scanRangeLocation;
+    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/SchemaScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/SchemaScanNode.java
index 91867f84c6..595d09792f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/SchemaScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/SchemaScanNode.java
@@ -24,6 +24,7 @@ import org.apache.doris.common.Config;
 import org.apache.doris.common.UserException;
 import org.apache.doris.common.util.Util;
 import org.apache.doris.datasource.InternalCatalog;
+import org.apache.doris.planner.external.FederationBackendPolicy;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.service.FrontendOptions;
 import org.apache.doris.statistics.StatisticalType;
@@ -34,6 +35,7 @@ import org.apache.doris.thrift.TSchemaScanNode;
 import org.apache.doris.thrift.TUserIdentity;
 
 import com.google.common.base.MoreObjects;
+import com.google.common.collect.Lists;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
@@ -127,13 +129,16 @@ public class SchemaScanNode extends ScanNode {
         msg.schema_scan_node.setCurrentUserIdent(tCurrentUser);
     }
 
-    /**
-     * We query MySQL Meta to get request's data location
-     * extra result info will pass to backend ScanNode
-     */
     @Override
     public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return null;
+        return scanRangeLocations;
+    }
+
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        FederationBackendPolicy backendPolicy = new FederationBackendPolicy();
+        backendPolicy.init();
+        scanRangeLocations = Lists.newArrayList(createSingleScanRangeLocations(backendPolicy));
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/TestExternalTableScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/TestExternalTableScanNode.java
index f537d091be..2ce78391e7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/TestExternalTableScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/TestExternalTableScanNode.java
@@ -20,40 +20,28 @@ package org.apache.doris.planner;
 import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.common.UserException;
+import org.apache.doris.planner.external.ExternalScanNode;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.statistics.StatsRecursiveDerive;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TPlanNode;
 import org.apache.doris.thrift.TPlanNodeType;
-import org.apache.doris.thrift.TScanRangeLocations;
 import org.apache.doris.thrift.TTestExternalScanNode;
 
 import com.google.common.base.MoreObjects;
+import com.google.common.collect.Lists;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
-import java.util.List;
-
-public class TestExternalTableScanNode extends ScanNode {
+public class TestExternalTableScanNode extends ExternalScanNode {
     private static final Logger LOG = LogManager.getLogger(TestExternalTableScanNode.class);
     private String tableName;
 
     public TestExternalTableScanNode(PlanNodeId id, TupleDescriptor desc) {
-        super(id, desc, "TestExternalTableScanNode", StatisticalType.TEST_EXTERNAL_TABLE);
+        super(id, desc, "TestExternalTableScanNode", StatisticalType.TEST_EXTERNAL_TABLE, true);
         tableName = desc.getTable().getName();
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        computeStats(analyzer);
-    }
-
-    @Override
-    public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
-        return null;
-    }
-
     @Override
     public String getNodeExplainString(String prefix, TExplainLevel detailLevel) {
         StringBuilder output = new StringBuilder();
@@ -63,6 +51,12 @@ public class TestExternalTableScanNode extends ScanNode {
 
     @Override
     public void finalize(Analyzer analyzer) throws UserException {
+        createScanRangeLocations();
+    }
+
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        scanRangeLocations = Lists.newArrayList(createSingleScanRangeLocations(backendPolicy));
     }
 
     @Override
@@ -87,9 +81,4 @@ public class TestExternalTableScanNode extends ScanNode {
         MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);
         return helper.addValue(super.debugString()).toString();
     }
-
-    @Override
-    public int getNumInstances() {
-        return 1;
-    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/ExternalScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/ExternalScanNode.java
index 24137e1351..2fda6fbd28 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/ExternalScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/ExternalScanNode.java
@@ -17,13 +17,14 @@
 
 package org.apache.doris.planner.external;
 
+import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
+import org.apache.doris.common.UserException;
 import org.apache.doris.planner.PlanNodeId;
 import org.apache.doris.planner.ScanNode;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.thrift.TScanRangeLocations;
 
-import com.google.common.collect.Lists;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
@@ -42,8 +43,7 @@ public abstract class ExternalScanNode extends ScanNode {
     // set to false means this scan node does not need to check column priv.
     protected boolean needCheckColumnPriv;
 
-    // Final output of this file scan node
-    protected List<TScanRangeLocations> scanRangeLocations = Lists.newArrayList();
+    protected final FederationBackendPolicy backendPolicy = new FederationBackendPolicy();
 
     public ExternalScanNode(PlanNodeId id, TupleDescriptor desc, String planNodeName, StatisticalType statisticalType,
             boolean needCheckColumnPriv) {
@@ -51,6 +51,30 @@ public abstract class ExternalScanNode extends ScanNode {
         this.needCheckColumnPriv = needCheckColumnPriv;
     }
 
+    @Override
+    public void init(Analyzer analyzer) throws UserException {
+        super.init(analyzer);
+        computeStats(analyzer);
+        computeColumnFilter();
+        initBackendPolicy();
+    }
+
+    // For Nereids
+    @Override
+    public void init() throws UserException {
+        computeColumnFilter();
+        initBackendPolicy();
+    }
+
+    protected void initBackendPolicy() throws UserException {
+        backendPolicy.init();
+        numNodes = backendPolicy.numBackends();
+    }
+
+    public FederationBackendPolicy getBackendPolicy() {
+        return backendPolicy;
+    }
+
     @Override
     public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
         LOG.debug("There is {} scanRangeLocations for execution.", scanRangeLocations.size());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/FederationBackendPolicy.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/FederationBackendPolicy.java
index aee9252e2e..91300be1ad 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/FederationBackendPolicy.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/FederationBackendPolicy.java
@@ -66,6 +66,10 @@ public class FederationBackendPolicy {
                 .assignExpectBeNum(Config.min_backend_num_for_external_table)
                 .addPreLocations(preLocations)
                 .build();
+        init(policy);
+    }
+
+    public void init(BeSelectionPolicy policy) throws UserException {
         backends.addAll(policy.getCandidateBackends(Env.getCurrentSystemInfo().getIdToBackend().values()));
         if (backends.isEmpty()) {
             throw new UserException("No available backends");
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/FileQueryScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/FileQueryScanNode.java
index f83c2529c2..b07a48da2b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/FileQueryScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/FileQueryScanNode.java
@@ -101,6 +101,7 @@ public abstract class FileQueryScanNode extends FileScanNode {
     /**
      * Init ExternalFileScanNode, ONLY used for Nereids. Should NOT use this function in anywhere else.
      */
+    @Override
     public void init() throws UserException {
         doInitialize();
     }
@@ -108,10 +109,13 @@ public abstract class FileQueryScanNode extends FileScanNode {
     // Init scan provider and schema related params.
     protected void doInitialize() throws UserException {
         Preconditions.checkNotNull(desc);
-        ExternalTable table = (ExternalTable) desc.getTable();
-        if (table.isView()) {
-            throw new AnalysisException(
-                String.format("Querying external view '%s.%s' is not supported", table.getDbName(), table.getName()));
+        if (desc.getTable() instanceof ExternalTable) {
+            ExternalTable table = (ExternalTable) desc.getTable();
+            if (table.isView()) {
+                throw new AnalysisException(
+                        String.format("Querying external view '%s.%s' is not supported", table.getDbName(),
+                                table.getName()));
+            }
         }
         computeColumnFilter();
         initBackendPolicy();
@@ -144,11 +148,6 @@ public abstract class FileQueryScanNode extends FileScanNode {
         params.setSrcTupleId(-1);
     }
 
-    protected void initBackendPolicy() throws UserException {
-        backendPolicy.init();
-        numNodes = backendPolicy.numBackends();
-    }
-
     /**
      * Reset required_slots in contexts. This is called after Nereids planner do the projection.
      * In the projection process, some slots may be removed. So call this to update the slots info.
@@ -211,6 +210,7 @@ public abstract class FileQueryScanNode extends FileScanNode {
         params.setColumnIdxs(columnIdxs);
     }
 
+    @Override
     public void createScanRangeLocations() throws UserException {
         long start = System.currentTimeMillis();
         List<Split> inputSplits = getSplits();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/FileScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/FileScanNode.java
index aa0e923203..694e97a7d9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/FileScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/FileScanNode.java
@@ -25,7 +25,6 @@ import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.TableIf;
 import org.apache.doris.common.UserException;
-import org.apache.doris.planner.FileLoadScanNode;
 import org.apache.doris.planner.PlanNodeId;
 import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.spi.Split;
@@ -57,7 +56,7 @@ import java.util.Map;
 /**
  * Base class for External File Scan, including external query and load.
  */
-public class FileScanNode extends ExternalScanNode {
+public abstract class FileScanNode extends ExternalScanNode {
     private static final Logger LOG = LogManager.getLogger(FileScanNode.class);
 
     public static final long DEFAULT_SPLIT_SIZE = 128 * 1024 * 1024; // 128MB
@@ -68,8 +67,6 @@ public class FileScanNode extends ExternalScanNode {
     protected long totalPartitionNum = 0;
     protected long readPartitionNum = 0;
 
-    protected final FederationBackendPolicy backendPolicy = new FederationBackendPolicy();
-
     public FileScanNode(PlanNodeId id, TupleDescriptor desc, String planNodeName, StatisticalType statisticalType,
                             boolean needCheckColumnPriv) {
         super(id, desc, planNodeName, statisticalType, needCheckColumnPriv);
@@ -159,13 +156,6 @@ public class FileScanNode extends ExternalScanNode {
         return output.toString();
     }
 
-    // TODO: This api is for load job only. Will remove it later.
-    protected void createScanRangeLocations(FileLoadScanNode.ParamCreateContext context,
-                                            LoadScanProvider scanProvider)
-            throws UserException {
-        scanProvider.createScanRangeLocations(context, backendPolicy, scanRangeLocations);
-    }
-
     protected void setDefaultValueExprs(TableIf tbl,
                                         Map<String, SlotDescriptor> slotDescByName,
                                         TFileScanRangeParams params,
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/MetadataScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/MetadataScanNode.java
index ba48ab5ed2..d16126fd8f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/MetadataScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/MetadataScanNode.java
@@ -21,7 +21,6 @@ import org.apache.doris.analysis.Analyzer;
 import org.apache.doris.analysis.TupleDescriptor;
 import org.apache.doris.common.UserException;
 import org.apache.doris.planner.PlanNodeId;
-import org.apache.doris.planner.ScanNode;
 import org.apache.doris.statistics.StatisticalType;
 import org.apache.doris.system.Backend;
 import org.apache.doris.tablefunction.MetadataTableValuedFunction;
@@ -37,25 +36,17 @@ import com.google.common.collect.Lists;
 
 import java.util.List;
 
-public class MetadataScanNode extends ScanNode {
+public class MetadataScanNode extends ExternalScanNode {
 
     private MetadataTableValuedFunction tvf;
 
     private List<TScanRangeLocations> scanRangeLocations = Lists.newArrayList();
 
-    private final FederationBackendPolicy backendPolicy = new FederationBackendPolicy();
-
     public MetadataScanNode(PlanNodeId id, TupleDescriptor desc, MetadataTableValuedFunction tvf) {
-        super(id, desc, "METADATA_SCAN_NODE", StatisticalType.METADATA_SCAN_NODE);
+        super(id, desc, "METADATA_SCAN_NODE", StatisticalType.METADATA_SCAN_NODE, false);
         this.tvf = tvf;
     }
 
-    @Override
-    public void init(Analyzer analyzer) throws UserException {
-        super.init(analyzer);
-        backendPolicy.init();
-    }
-
     @Override
     protected void toThrift(TPlanNode planNode) {
         planNode.setNodeType(TPlanNodeType.META_SCAN_NODE);
@@ -65,6 +56,23 @@ public class MetadataScanNode extends ScanNode {
         planNode.setMetaScanNode(metaScanNode);
     }
 
+    @Override
+    protected void createScanRangeLocations() throws UserException {
+        TScanRange scanRange = new TScanRange();
+        scanRange.setMetaScanRange(tvf.getMetaScanRange());
+        // set location
+        TScanRangeLocation location = new TScanRangeLocation();
+        Backend backend = backendPolicy.getNextBe();
+        location.setBackendId(backend.getId());
+        location.setServer(new TNetworkAddress(backend.getHost(), backend.getBePort()));
+
+        TScanRangeLocations locations = new TScanRangeLocations();
+        locations.addToLocations(location);
+        locations.setScanRange(scanRange);
+
+        scanRangeLocations.add(locations);
+    }
+
     @Override
     public List<TScanRangeLocations> getScanRangeLocations(long maxScanRangeLength) {
         return scanRangeLocations;
@@ -72,7 +80,7 @@ public class MetadataScanNode extends ScanNode {
 
     @Override
     public void finalize(Analyzer analyzer) throws UserException {
-        buildScanRanges();
+        createScanRangeLocations();
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/TVFScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/TVFScanNode.java
index 9ec6c6c618..0dfb78abed 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/TVFScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/TVFScanNode.java
@@ -34,7 +34,6 @@ import org.apache.doris.thrift.TFileCompressType;
 import org.apache.doris.thrift.TFileFormatType;
 import org.apache.doris.thrift.TFileType;
 
-import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import org.apache.hadoop.fs.Path;
 import org.apache.logging.log4j.LogManager;
@@ -62,14 +61,6 @@ public class TVFScanNode extends FileQueryScanNode {
         tableValuedFunction = (ExternalFileTableValuedFunction) table.getTvf();
     }
 
-    @Override
-    protected void doInitialize() throws UserException {
-        Preconditions.checkNotNull(desc);
-        computeColumnFilter();
-        initBackendPolicy();
-        initSchemaParams();
-    }
-
     @Override
     protected String getFsName(FileSplit split) {
         return tableValuedFunction.getFsName();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/hudi/HudiScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/hudi/HudiScanNode.java
index aa7a9c50cc..a0277b0377 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/hudi/HudiScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/hudi/HudiScanNode.java
@@ -102,11 +102,11 @@ public class HudiScanNode extends HiveScanNode {
         }
         computeColumnFilter();
         initBackendPolicy();
+        initSchemaParams();
         if (table instanceof HMSExternalTable) {
             source = new HudiHMSSource((HMSExternalTable) table, desc, columnNameToRange);
         }
         Preconditions.checkNotNull(source);
-        initSchemaParams();
     }
 
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/iceberg/IcebergScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/iceberg/IcebergScanNode.java
index 5038ecb476..09320e6f59 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/iceberg/IcebergScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/iceberg/IcebergScanNode.java
@@ -24,7 +24,6 @@ import org.apache.doris.catalog.TableIf;
 import org.apache.doris.catalog.external.ExternalTable;
 import org.apache.doris.catalog.external.HMSExternalTable;
 import org.apache.doris.catalog.external.IcebergExternalTable;
-import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.UserException;
 import org.apache.doris.common.util.S3Util;
@@ -83,17 +82,8 @@ public class IcebergScanNode extends FileQueryScanNode {
      */
     public IcebergScanNode(PlanNodeId id, TupleDescriptor desc, boolean needCheckColumnPriv) {
         super(id, desc, "ICEBERG_SCAN_NODE", StatisticalType.ICEBERG_SCAN_NODE, needCheckColumnPriv);
-    }
 
-    @Override
-    protected void doInitialize() throws UserException {
         ExternalTable table = (ExternalTable) desc.getTable();
-        if (table.isView()) {
-            throw new AnalysisException(
-                String.format("Querying external view '%s.%s' is not supported", table.getDbName(), table.getName()));
-        }
-        computeColumnFilter();
-        initBackendPolicy();
         if (table instanceof HMSExternalTable) {
             source = new IcebergHMSSource((HMSExternalTable) table, desc, columnNameToRange);
         } else if (table instanceof IcebergExternalTable) {
@@ -106,11 +96,17 @@ public class IcebergScanNode extends FileQueryScanNode {
                     source = new IcebergApiSource((IcebergExternalTable) table, desc, columnNameToRange);
                     break;
                 default:
-                    throw new UserException("Unknown iceberg catalog type: " + catalogType);
+                    Preconditions.checkState(false, "Unknown iceberg catalog type: " + catalogType);
+                    break;
             }
         }
         Preconditions.checkNotNull(source);
-        initSchemaParams();
+    }
+
+    @Override
+    protected void doInitialize() throws UserException {
+        super.doInitialize();
+
     }
 
     public static void setIcebergParams(TFileRangeDesc rangeDesc, IcebergSplit icebergSplit) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/Coordinator.java b/fe/fe-core/src/main/java/org/apache/doris/qe/Coordinator.java
index 1ec5fe623b..a042c0a8ad 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/Coordinator.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/Coordinator.java
@@ -63,7 +63,7 @@ import org.apache.doris.planner.ScanNode;
 import org.apache.doris.planner.SetOperationNode;
 import org.apache.doris.planner.UnionNode;
 import org.apache.doris.planner.external.ExternalScanNode;
-import org.apache.doris.planner.external.FederationBackendPolicy;
+import org.apache.doris.planner.external.FileQueryScanNode;
 import org.apache.doris.proto.InternalService;
 import org.apache.doris.proto.InternalService.PExecPlanFragmentResult;
 import org.apache.doris.proto.InternalService.PExecPlanFragmentStartRequest;
@@ -268,6 +268,9 @@ public class Coordinator {
         return executionProfile;
     }
 
+    // True if all scan nodes are ExternalScanNode.
+    private boolean isAllExternalScan = true;
+
     private static class BackendHash implements Funnel<Backend> {
         @Override
         public void funnel(Backend backend, PrimitiveSink primitiveSink) {
@@ -1548,10 +1551,14 @@ public class Coordinator {
             if (fragment.getDataPartition() == DataPartition.UNPARTITIONED) {
                 Reference<Long> backendIdRef = new Reference<Long>();
                 TNetworkAddress execHostport;
-                if (ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()
-                        && !addressToBackendID.isEmpty()) {
-                    // In this case, we only use the BE where the replica selected by the tag is located to execute
-                    // this query. Otherwise, except for the scan node, the rest of the execution nodes of the query
+                if (((ConnectContext.get() != null && ConnectContext.get().isResourceTagsSet()) || (isAllExternalScan
+                        && Config.prefer_compute_node_for_external_table)) && !addressToBackendID.isEmpty()) {
+                    // 2 cases:
+                    // case 1: user set resource tag, we need to use the BE with the specified resource tags.
+                    // case 2: All scan nodes are external scan nodes,
+                    //         and prefer_compute_node_for_external_table is true, we should only select BE which scan
+                    //         nodes are used.
+                    // Otherwise, except for the scan node, the rest of the execution nodes of the query
                     // can be executed on any BE. addressToBackendID can be empty when this is a constant
                     // select stmt like:
                     //      SELECT  @@session.auto_increment_increment AS auto_increment_increment;
@@ -1731,8 +1738,7 @@ public class Coordinator {
                     // backendIdRef can be null is we call getHostByCurrentBackend() before
                     this.addressToBackendID.put(execHostport, backendIdRef.getRef());
                 }
-                FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport,
-                        0, params);
+                FInstanceExecParam instanceParam = new FInstanceExecParam(null, execHostport, 0, params);
                 params.instanceExecParams.add(instanceParam);
             }
         }
@@ -1745,8 +1751,8 @@ public class Coordinator {
             FragmentExecParams params = fragmentExecParamsMap.get(fragment.getFragmentId());
             // Transform <fragment, runtimeFilterId> to <runtimeFilterId, fragment>
             for (RuntimeFilterId rid : fragment.getTargetRuntimeFilterIds()) {
-                List<FRuntimeFilterTargetParam> targetFragments =
-                        ridToTargetParam.computeIfAbsent(rid, k -> new ArrayList<>());
+                List<FRuntimeFilterTargetParam> targetFragments = ridToTargetParam.computeIfAbsent(rid,
+                        k -> new ArrayList<>());
                 for (final FInstanceExecParam instance : params.instanceExecParams) {
                     targetFragments.add(new FRuntimeFilterTargetParam(instance.instanceId, toBrpcHost(instance.host)));
                 }
@@ -1891,14 +1897,10 @@ public class Coordinator {
         }
     }
 
-    private Map<TNetworkAddress, Long> getReplicaNumPerHost() {
+    private Map<TNetworkAddress, Long> getReplicaNumPerHostForOlapTable() {
         Map<TNetworkAddress, Long> replicaNumPerHost = Maps.newHashMap();
         for (ScanNode scanNode : scanNodes) {
             List<TScanRangeLocations> locationsList = scanNode.getScanRangeLocations(0);
-            if (locationsList == null) {
-                // only analysis olap scan node
-                continue;
-            }
             for (TScanRangeLocations locations : locationsList) {
                 for (TScanRangeLocation location : locations.locations) {
                     if (replicaNumPerHost.containsKey(location.server)) {
@@ -1922,11 +1924,15 @@ public class Coordinator {
             Preconditions.checkNotNull(locations);
             return;
         }
+
         Map<TNetworkAddress, Long> assignedBytesPerHost = Maps.newHashMap();
-        Map<TNetworkAddress, Long> replicaNumPerHost = getReplicaNumPerHost();
+        Map<TNetworkAddress, Long> replicaNumPerHost = getReplicaNumPerHostForOlapTable();
         Collections.shuffle(scanNodes);
         // set scan ranges/locations for scan nodes
         for (ScanNode scanNode : scanNodes) {
+            if (!(scanNode instanceof ExternalScanNode)) {
+                isAllExternalScan = false;
+            }
             List<TScanRangeLocations> locations;
             // the parameters of getScanRangeLocations may ignore, It doesn't take effect
             locations = scanNode.getScanRangeLocations(0);
@@ -2066,17 +2072,17 @@ public class Coordinator {
     }
 
     private void computeScanRangeAssignmentByConsistentHash(
-            ScanNode scanNode,
+            FileQueryScanNode scanNode,
             final List<TScanRangeLocations> locations,
             FragmentScanRangeAssignment assignment,
             Map<TNetworkAddress, Long> assignedBytesPerHost,
             Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
-        FederationBackendPolicy federationBackendPolicy = new FederationBackendPolicy();
-        federationBackendPolicy.init();
-        Collection<Backend> aliveBEs = federationBackendPolicy.getBackends();
+
+        Collection<Backend> aliveBEs = scanNode.getBackendPolicy().getBackends();
         if (aliveBEs.isEmpty()) {
             throw new UserException("No available backends");
         }
+
         int virtualNumber = Math.max(Math.min(512 / aliveBEs.size(), 32), 2);
         ConsistentHash<TScanRangeLocations, Backend> consistentHash = new ConsistentHash<>(
                 Hashing.murmur3_128(), new ScanRangeHash(), new BackendHash(), aliveBEs, virtualNumber);
@@ -2111,10 +2117,10 @@ public class Coordinator {
             FragmentScanRangeAssignment assignment,
             Map<TNetworkAddress, Long> assignedBytesPerHost,
             Map<TNetworkAddress, Long> replicaNumPerHost) throws Exception {
-        if (scanNode instanceof ExternalScanNode) {
+        if (scanNode instanceof FileQueryScanNode) {
             // Use consistent hash to assign the same scan range into the same backend among different queries
             computeScanRangeAssignmentByConsistentHash(
-                    scanNode, locations, assignment, assignedBytesPerHost, replicaNumPerHost);
+                    (FileQueryScanNode) scanNode, locations, assignment, assignedBytesPerHost, replicaNumPerHost);
             return;
         }
         for (TScanRangeLocations scanRangeLocations : locations) {
diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/CoordinatorTest.java b/fe/fe-core/src/test/java/org/apache/doris/qe/CoordinatorTest.java
index 941a587a6d..ca8109e40e 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/qe/CoordinatorTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/qe/CoordinatorTest.java
@@ -64,8 +64,6 @@ import java.util.Map;
 import java.util.Set;
 
 public class CoordinatorTest extends Coordinator {
-
-
     @Mocked
     static Env env;
     @Mocked
@@ -768,7 +766,7 @@ public class CoordinatorTest extends Coordinator {
         olapScanNode.setFragment(fragment);
         List<TScanRangeLocations> locations = new ArrayList<>();
         locations.add(tScanRangeLocations);
-        Deencapsulation.setField(olapScanNode, "result", locations);
+        Deencapsulation.setField(olapScanNode, "scanRangeLocations", locations);
 
         //scanNode2
         PlanFragmentId planFragmentId2 = new PlanFragmentId(2);
@@ -783,7 +781,7 @@ public class CoordinatorTest extends Coordinator {
         olapScanNode2.setFragment(fragment2);
         List<TScanRangeLocations> locations2 = new ArrayList<>();
         locations2.add(tScanRangeLocations);
-        Deencapsulation.setField(olapScanNode2, "result", locations2);
+        Deencapsulation.setField(olapScanNode2, "scanRangeLocations", locations2);
 
         //scanNode3
         PlanFragmentId planFragmentId3 = new PlanFragmentId(3);
@@ -798,7 +796,7 @@ public class CoordinatorTest extends Coordinator {
         olapScanNode3.setFragment(fragment3);
         List<TScanRangeLocations> locations3 = new ArrayList<>();
         locations3.add(tScanRangeLocations);
-        Deencapsulation.setField(olapScanNode3, "result", locations3);
+        Deencapsulation.setField(olapScanNode3, "scanRangeLocations", locations3);
 
         //scan nodes
         List<ScanNode> scanNodes = new ArrayList<>();
diff --git a/regression-test/data/es_p0/test_es_query.out b/regression-test/data/es_p0/test_es_query.out
index 57ddfafd02..4da0342d86 100644
--- a/regression-test/data/es_p0/test_es_query.out
+++ b/regression-test/data/es_p0/test_es_query.out
@@ -1,9 +1,9 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
 -- !sql51 --
-[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	['d', 'e', 'f']	[128, 129, -129, -130]	['192.168.0.1', '127.0.0.1']	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	['a', 'b', 'c']	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
+[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	["d", "e", "f"]	[128, 129, -129, -130]	["192.168.0.1", "127.0.0.1"]	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	["a", "b", "c"]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
 
 -- !sql52 --
-[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	['d', 'e', 'f']	[128, 129, -129, -130]	['192.168.0.1', '127.0.0.1']	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	['a', 'b', 'c']	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
+[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	["d", "e", "f"]	[128, 129, -129, -130]	["192.168.0.1", "127.0.0.1"]	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	["a", "b", "c"]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
 
 -- !sql53 --
 2022-08-08	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T04:10:10	2022-08-08T20:10:10
@@ -12,10 +12,10 @@
 2022-08-08	2022-08-11T12:10:10	2022-08-11T12:10:10	2022-08-11T12:10:10	2022-08-11T11:10:10
 
 -- !sql53 --
-[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	['d', 'e', 'f']	[128, 129, -129, -130]	['192.168.0.1', '127.0.0.1']	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	['a', 'b', 'c']	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
+[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	["d", "e", "f"]	[128, 129, -129, -130]	["192.168.0.1", "127.0.0.1"]	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	["a", "b", "c"]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
 
 -- !sql54 --
-[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	['d', 'e', 'f']	[128, 129, -129, -130]	['192.168.0.1', '127.0.0.1']	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	['a', 'b', 'c']	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
+[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	["d", "e", "f"]	[128, 129, -129, -130]	["192.168.0.1", "127.0.0.1"]	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	["a", "b", "c"]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
 
 -- !sql55 --
 2022-08-08	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T04:10:10	2022-08-08T20:10:10
@@ -24,15 +24,15 @@
 2022-08-08	2022-08-11T12:10:10	2022-08-11T12:10:10	2022-08-11T12:10:10	2022-08-11T11:10:10
 
 -- !sql62 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	['2020-01-01 12:00:00', '2020-01-02 13:01:01']	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	["2020-01-01 12:00:00", "2020-01-02 13:01:01"]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00
 
 -- !sql63 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	['2020-01-01 12:00:00', '2020-01-02 13:01:01']	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	['2020-01-01 12:00:00', '2020-01-02 13:01:01']	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	['2020-01-01 12:00:00', '2020-01-02 13:01:01']	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string3	text3_4*5	5.0	2022-08-08T00:00
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	["2020-01-01 12:00:00", "2020-01-02 13:01:01"]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	["2020-01-01 12:00:00", "2020-01-02 13:01:01"]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	["2020-01-01 12:00:00", "2020-01-02 13:01:01"]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string3	text3_4*5	5.0	2022-08-08T00:00
 
 -- !sql64 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	['2020-01-01 12:00:00', '2020-01-02 13:01:01']	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	["2020-01-01 12:00:00", "2020-01-02 13:01:01"]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00
 
 -- !sql65 --
 true	1	128	32768	-1	0	1.0	1	1	1	2020-01-01T00:00	2020-01-01 12:00:00	a	d	192.168.0.1	{"name":"Andy","age":18}
@@ -45,19 +45,19 @@ true	1	128	32768	-1	0	1.0	1	1	1	2020-01-01T00:00	2020-01-01 12:00:00	a	d	192.168
 true	1	128	32768	-1	0	1.0	1	1	1	2020-01-01T00:00	2020-01-01 12:00:00	a	d	192.168.0.1	{"name":"Andy","age":18}
 
 -- !sql67 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	['2020-01-01 12:00:00', '2020-01-02 13:01:01']	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	["2020-01-01 12:00:00", "2020-01-02 13:01:01"]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00
 
 -- !sql72 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
 
 -- !sql73 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00	2022-08-09T12:10:10	1660018210000	2022-08-09T12:10:10	2022-08-09T12:10:10
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string3	text3_4*5	5.0	2022-08-08T00:00	2022-08-10T12:10:10	1660104610000	2022-08-10T12:10:10	2022-08-10T20:10:10
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string4	text3_4*5	6.0	2022-08-08T00:00	2022-08-11T12:10:10	1660191010000	2022-08-11T12:10:10	2022-08-11T11:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00	2022-08-09T12:10:10	1660018210000	2022-08-09T12:10:10	2022-08-09T12:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string3	text3_4*5	5.0	2022-08-08T00:00	2022-08-10T12:10:10	1660104610000	2022-08-10T12:10:10	2022-08-10T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string4	text3_4*5	6.0	2022-08-08T00:00	2022-08-11T12:10:10	1660191010000	2022-08-11T12:10:10	2022-08-11T11:10:10
 
 -- !sql74 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00	2022-08-09T12:10:10	1660018210000	2022-08-09T12:10:10	2022-08-09T12:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00	2022-08-09T12:10:10	1660018210000	2022-08-09T12:10:10	2022-08-09T12:10:10
 
 -- !sql75 --
 true	1	128	32768	-1	0	1.0	1.0	1.0	1.0	2020-01-01	2020-01-01T12:00	a	d	192.168.0.1	{"name":"Andy","age":18}
@@ -72,16 +72,16 @@ true	1	128	32768	-1	0	1.0	1.0	1.0	1.0	2020-01-01	2020-01-01T12:00	a	d	192.168.0.
 true	1	128	32768	-1	0	1.0	1.0	1.0	1.0	2020-01-01	2020-01-01T12:00	a	d	192.168.0.1	{"name":"Andy","age":18}
 
 -- !sql77 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
 
 -- !sql81 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
 
 -- !sql82 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00	2022-08-09T12:10:10	1660018210000	2022-08-09T12:10:10	2022-08-09T12:10:10
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string3	text3_4*5	5.0	2022-08-08T00:00	2022-08-10T12:10:10	1660104610000	2022-08-10T12:10:10	2022-08-10T20:10:10
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string4	text3_4*5	6.0	2022-08-08T00:00	2022-08-11T12:10:10	1660191010000	2022-08-11T12:10:10	2022-08-11T11:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string2	text2	4.0	2022-08-08T00:00	2022-08-09T12:10:10	1660018210000	2022-08-09T12:10:10	2022-08-09T12:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string3	text3_4*5	5.0	2022-08-08T00:00	2022-08-10T12:10:10	1660104610000	2022-08-10T12:10:10	2022-08-10T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string4	text3_4*5	6.0	2022-08-08T00:00	2022-08-11T12:10:10	1660191010000	2022-08-11T12:10:10	2022-08-11T11:10:10
 
 -- !sql83 --
 true	1	128	32768	-1	0	1.0	1.0	1.0	1.0	2020-01-01	2020-01-01T12:00	a	d	192.168.0.1	{"name":"Andy","age":18}
@@ -96,5 +96,5 @@ true	1	128	32768	-1	0	1.0	1.0	1.0	1.0	2020-01-01	2020-01-01T12:00	a	d	192.168.0.
 true	1	128	32768	-1	0	1.0	1.0	1.0	1.0	2020-01-01	2020-01-01T12:00	a	d	192.168.0.1	{"name":"Andy","age":18}
 
 -- !sql85 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
 
diff --git a/regression-test/data/es_p0/test_es_query_no_http_url.out b/regression-test/data/es_p0/test_es_query_no_http_url.out
index b195d6d187..e1c30290dc 100644
--- a/regression-test/data/es_p0/test_es_query_no_http_url.out
+++ b/regression-test/data/es_p0/test_es_query_no_http_url.out
@@ -1,16 +1,16 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
 -- !sql51 --
-[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	['d', 'e', 'f']	[128, 129, -129, -130]	['192.168.0.1', '127.0.0.1']	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	['a', 'b', 'c']	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
+[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	["d", "e", "f"]	[128, 129, -129, -130]	["192.168.0.1", "127.0.0.1"]	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	["a", "b", "c"]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
 
 -- !sql52 --
-[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	['d', 'e', 'f']	[128, 129, -129, -130]	['192.168.0.1', '127.0.0.1']	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	['a', 'b', 'c']	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
+[2020-01-01, 2020-01-02]	[-1, 0, 1, 2]	[0, 1, 2, 3]	["d", "e", "f"]	[128, 129, -129, -130]	["192.168.0.1", "127.0.0.1"]	string1	[1, 2, 3, 4]	2022-08-08	2022-08-08T12:10:10	text#1	[2020-01-01, 2020-01-02]	3.14	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	["a", "b", "c"]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	2022-08-08T12:10:10	2022-08-08T12:10:10	2022-08-08T20:10:10	[1, -2, -3, 4]	[1, 0, 1, 1]	[32768, 32769, -32769, -32770]
 
 -- !sql61 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	['2020-01-01 12:00:00', '2020-01-02 13:01:01']	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01 00:00:00, 2020-01-02 00:00:00]	["2020-01-01 12:00:00", "2020-01-02 13:01:01"]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00
 
 -- !sql71 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
 
 -- !sql81 --
-[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	['192.168.0.1', '127.0.0.1']	['a', 'b', 'c']	[-1, 0, 1, 2]	['{"name":"Andy","age":18}', '{"name":"Tim","age":28}']	[1, 2, 3, 4]	[128, 129, -129, -130]	['d', 'e', 'f']	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
+[1, 0, 1, 1]	[1, -2, -3, 4]	[2020-01-01, 2020-01-02]	[2020-01-01 12:00:00, 2020-01-02 13:01:01]	[1, 2, 3, 4]	[1, 1.1, 1.2, 1.3]	[1, 2, 3, 4]	[32768, 32769, -32769, -32770]	["192.168.0.1", "127.0.0.1"]	["a", "b", "c"]	[-1, 0, 1, 2]	["{"name":"Andy","age":18}", "{"name":"Tim","age":28}"]	[1, 2, 3, 4]	[128, 129, -129, -130]	["d", "e", "f"]	[0, 1, 2, 3]	string1	text#1	3.14	2022-08-08T00:00	2022-08-08T12:10:10	1659931810000	2022-08-08T12:10:10	2022-08-08T20:10:10
 
diff --git a/regression-test/suites/es_p0/test_es_query.groovy b/regression-test/suites/es_p0/test_es_query.groovy
index 2b0251c1e1..24b1817732 100644
--- a/regression-test/suites/es_p0/test_es_query.groovy
+++ b/regression-test/suites/es_p0/test_es_query.groovy
@@ -161,9 +161,5 @@ suite("test_es_query", "p0") {
         order_qt_sql84 """select c_bool[1], c_byte[1], c_short[1], c_integer[1], c_long[1], c_unsigned_long[1], c_float[1], c_half_float[1], c_double[1], c_scaled_float[1], c_date[1], c_datetime[1], c_keyword[1], c_text[1], c_ip[1], c_person[1] from test2"""
         order_qt_sql85 """select * from test1 where esquery(test2, '{"match":{"test2":"text#1"}}')"""
 
-
-        sql """drop catalog if exists es6;"""
-        sql """drop catalog if exists es7;"""
-        sql """drop catalog if exists es8;"""
     }
 }
diff --git a/regression-test/suites/es_p0/test_es_query_nereids.groovy b/regression-test/suites/es_p0/test_es_query_nereids.groovy
index 407d6c2d17..4921ced70f 100644
--- a/regression-test/suites/es_p0/test_es_query_nereids.groovy
+++ b/regression-test/suites/es_p0/test_es_query_nereids.groovy
@@ -154,10 +154,5 @@ suite("test_es_query_nereids", "p0") {
         order_qt_sql82 """select * from test2_20220808 where test4='2022-08-08'"""
         order_qt_sql83 """select c_bool[1], c_byte[1], c_short[1], c_integer[1], c_long[1], c_unsigned_long[1], c_float[1], c_half_float[1], c_double[1], c_scaled_float[1], c_date[1], c_datetime[1], c_keyword[1], c_text[1], c_ip[1], c_person[1] from test1"""
         order_qt_sql84 """select c_bool[1], c_byte[1], c_short[1], c_integer[1], c_long[1], c_unsigned_long[1], c_float[1], c_half_float[1], c_double[1], c_scaled_float[1], c_date[1], c_datetime[1], c_keyword[1], c_text[1], c_ip[1], c_person[1] from test2"""
-
-
-        sql """drop catalog if exists es6_nereids;"""
-        sql """drop catalog if exists es7_nereids;"""
-        sql """drop catalog if exists es8_nereids;"""
     }
 }
diff --git a/regression-test/suites/es_p0/test_es_query_no_http_url.groovy b/regression-test/suites/es_p0/test_es_query_no_http_url.groovy
index e7e6af5b95..3f712fe778 100644
--- a/regression-test/suites/es_p0/test_es_query_no_http_url.groovy
+++ b/regression-test/suites/es_p0/test_es_query_no_http_url.groovy
@@ -143,9 +143,5 @@ suite("test_es_query_no_http_url", "p0") {
         // es8
         sql """switch es8"""
         order_qt_sql81 """select * from test1 where test2='text#1'"""
-
-        sql """drop catalog if exists es6;"""
-        sql """drop catalog if exists es7;"""
-        sql """drop catalog if exists es8;"""
     }
 }
diff --git a/regression-test/suites/external_table_emr_p2/hive/test_external_yandex_nereids.groovy b/regression-test/suites/external_table_emr_p2/hive/test_external_yandex_nereids.groovy
index 9f06df3100..af1a0e5d15 100644
--- a/regression-test/suites/external_table_emr_p2/hive/test_external_yandex_nereids.groovy
+++ b/regression-test/suites/external_table_emr_p2/hive/test_external_yandex_nereids.groovy
@@ -62,6 +62,7 @@ suite("test_external_yandex_nereids", "p2") {
         sql """use multi_catalog;"""
         logger.info("use multi_catalog")
         sql """set enable_nereids_planner=true"""
+        sql """set enable_fallback_to_original_planner=false"""
 
         for (String format in formats) {
             logger.info("Process format " + format)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@doris.apache.org
For additional commands, e-mail: commits-help@doris.apache.org