Posted to commits@doris.apache.org by yi...@apache.org on 2022/05/19 08:36:10 UTC

[incubator-doris] branch master updated: [style](fe) code correct rules and name rules (#9670)

This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 235d586f11 [style](fe) code correct rules and name rules (#9670)
235d586f11 is described below

commit 235d586f11965bde9f07a5a7a6b80191ea04cad1
Author: morrySnow <10...@users.noreply.github.com>
AuthorDate: Thu May 19 16:36:03 2022 +0800

    [style](fe) code correct rules and name rules (#9670)
    
    * [style](fe) code correct rules and name rules
    
    * revert some change according to comments
---
 build-support/IntelliJ-code-format.xml             |   2 +-
 fe/check/checkstyle/checkstyle.xml                 |  40 ++-
 .../org/apache/doris/common/io/BitmapValue.java    |  34 +--
 .../java/org/apache/doris/common/io/Codec.java     |   4 +-
 .../doris/common/io/FastByteArrayInputStream.java  |   3 +-
 .../main/java/org/apache/doris/common/io/Hll.java  |  28 +-
 .../main/java/org/apache/doris/common/io/Text.java |  16 +-
 .../apache/doris/common/io/BitmapValueTest.java    | 122 ++++-----
 .../java/org/apache/doris/common/io/HllTest.java   |   2 +-
 .../java/org/apache/doris/alter/AlterHandler.java  |   4 +-
 .../doris/alter/MaterializedViewHandler.java       |   3 +-
 .../org/apache/doris/analysis/AggregateInfo.java   | 288 ++++++++++----------
 .../apache/doris/analysis/AggregateInfoBase.java   |  88 +++---
 .../org/apache/doris/analysis/AnalyticExpr.java    |   9 +-
 .../org/apache/doris/analysis/AnalyticInfo.java    |  86 +++---
 .../org/apache/doris/analysis/AnalyticWindow.java  | 181 +++++++------
 .../java/org/apache/doris/analysis/Analyzer.java   | 160 ++++++-----
 .../org/apache/doris/analysis/ArithmeticExpr.java  |   1 +
 .../org/apache/doris/analysis/BaseTableRef.java    |   6 +-
 .../apache/doris/analysis/BetweenPredicate.java    |  15 ++
 .../org/apache/doris/analysis/BinaryPredicate.java |  31 ++-
 .../org/apache/doris/analysis/BoolLiteral.java     |  15 ++
 .../doris/analysis/BuiltinAggregateFunction.java   |  20 +-
 .../java/org/apache/doris/analysis/CaseExpr.java   |   6 +
 .../java/org/apache/doris/analysis/CastExpr.java   |   5 +
 .../java/org/apache/doris/analysis/ColumnDef.java  |   1 +
 .../apache/doris/analysis/CompoundPredicate.java   |   6 +-
 .../org/apache/doris/analysis/DateLiteral.java     |  31 ++-
 .../org/apache/doris/analysis/DescriptorTable.java |  14 +-
 .../main/java/org/apache/doris/analysis/Expr.java  |  92 +++++--
 .../java/org/apache/doris/analysis/ExprId.java     |   4 +-
 .../apache/doris/analysis/ExprSubstitutionMap.java | 100 ++++---
 .../apache/doris/analysis/ExpressionFunctions.java |   6 +-
 .../java/org/apache/doris/analysis/FromClause.java |  68 ++---
 .../apache/doris/analysis/FunctionCallExpr.java    |   8 +-
 .../org/apache/doris/analysis/FunctionName.java    |  86 +++---
 .../org/apache/doris/analysis/GroupByClause.java   |   8 +-
 .../org/apache/doris/analysis/InPredicate.java     |   8 +-
 .../org/apache/doris/analysis/InlineViewRef.java   |  16 +-
 .../org/apache/doris/analysis/IsNullPredicate.java |   4 +-
 .../org/apache/doris/analysis/LimitElement.java    |   4 +-
 .../org/apache/doris/analysis/OutFileClause.java   |   5 +-
 .../java/org/apache/doris/analysis/QueryStmt.java  |  55 ++--
 .../java/org/apache/doris/analysis/SelectStmt.java | 120 ++++-----
 .../apache/doris/analysis/SetOperationStmt.java    | 174 +++++++-----
 .../apache/doris/analysis/ShowStreamLoadStmt.java  |   8 +-
 .../org/apache/doris/analysis/SlotDescriptor.java  |  44 +--
 .../java/org/apache/doris/analysis/SlotId.java     |   4 +-
 .../java/org/apache/doris/analysis/SlotRef.java    |   4 +-
 .../java/org/apache/doris/analysis/SortInfo.java   |  88 +++---
 .../org/apache/doris/analysis/StatementBase.java   |   8 +-
 .../org/apache/doris/analysis/StmtRewriter.java    |  20 +-
 .../java/org/apache/doris/analysis/Subquery.java   |  12 +-
 .../java/org/apache/doris/analysis/TableRef.java   |  64 ++---
 .../org/apache/doris/analysis/TupleDescriptor.java |  26 +-
 .../java/org/apache/doris/analysis/TupleId.java    |   4 +-
 .../doris/analysis/TupleIsNullPredicate.java       |   9 +-
 .../org/apache/doris/analysis/UserIdentity.java    |   8 +-
 .../java/org/apache/doris/analysis/WithClause.java |  30 ++-
 .../backup/{HDFSStorage.java => HdfsStorage.java}  |   2 +-
 .../org/apache/doris/catalog/AggregateType.java    |  14 +-
 .../org/apache/doris/catalog/AliasFunction.java    |   2 +
 .../java/org/apache/doris/catalog/ArrayType.java   |   7 +
 .../java/org/apache/doris/catalog/Catalog.java     |   3 +-
 .../main/java/org/apache/doris/catalog/Column.java |  11 +
 .../java/org/apache/doris/catalog/ColumnStats.java |  14 +-
 .../org/apache/doris/catalog/DataProperty.java     |   6 +
 .../java/org/apache/doris/catalog/Database.java    |   6 +
 .../java/org/apache/doris/catalog/Function.java    |   2 +-
 .../java/org/apache/doris/catalog/FunctionSet.java |   2 +-
 .../java/org/apache/doris/catalog/OdbcTable.java   |   3 +-
 .../java/org/apache/doris/catalog/Partition.java   |   1 +
 .../org/apache/doris/catalog/PartitionKey.java     |  10 +-
 .../apache/doris/catalog/ReplicaAllocation.java    |   8 +-
 .../org/apache/doris/catalog/ResourceType.java     |   3 +-
 .../org/apache/doris/catalog/ScalarFunction.java   |   8 +-
 .../main/java/org/apache/doris/catalog/Type.java   |  56 ++--
 .../main/java/org/apache/doris/catalog/View.java   |  20 +-
 .../doris/clone/DynamicPartitionScheduler.java     |   2 +-
 .../clone/TwoDimensionalGreedyRebalanceAlgo.java   |  14 +-
 .../java/org/apache/doris/common/IdGenerator.java  |   2 +-
 .../doris/common/ThriftServerEventProcessor.java   |   3 +-
 .../java/org/apache/doris/common/TreeNode.java     |  50 +++-
 .../org/apache/doris/common/proc/BDBJEProcDir.java |   8 +-
 ...abaseProcDir.java => BdbjeDatabaseProcDir.java} |   6 +-
 ...ProcNode.java => BdbjeJournalDataProcNode.java} |   4 +-
 .../doris/common/proc/TabletHealthProcDir.java     |   2 +-
 .../apache/doris/common/util/ReflectionUtils.java  |   4 +-
 .../java/org/apache/doris/common/util/Util.java    |   4 +-
 .../doris/external/elasticsearch/EsNodeInfo.java   |   8 +-
 .../doris/httpv2/controller/HelpController.java    |  12 +-
 .../doris/httpv2/rest/TableQueryPlanAction.java    |  14 +-
 .../doris/httpv2/rest/manager/NodeAction.java      |   6 +-
 .../org/apache/doris/httpv2/util/HttpUtil.java     |   3 +-
 .../apache/doris/httpv2/util/LoadSubmitter.java    |   2 +
 .../doris/httpv2/util/StatementSubmitter.java      |   4 +-
 .../apache/doris/journal/bdbje/BDBEnvironment.java |   2 +-
 .../apache/doris/journal/bdbje/BDBJEJournal.java   |   2 +-
 .../org/apache/doris/ldap/LdapPrivsChecker.java    |  12 +-
 .../java/org/apache/doris/load/LoadErrorHub.java   |   1 +
 .../org/apache/doris/load/StreamLoadRecordMgr.java |  18 +-
 .../apache/doris/load/loadv2/SparkRepository.java  |   8 +-
 .../doris/load/routineload/RoutineLoadManager.java |   6 +-
 .../apache/doris/load/sync/SyncChannelHandle.java  |   2 +-
 .../java/org/apache/doris/load/sync/SyncJob.java   |   2 +-
 .../doris/load/sync/canal/CanalDestination.java    |   4 +-
 .../apache/doris/load/sync/canal/CanalSyncJob.java |   2 +
 .../apache/doris/load/update/UpdatePlanner.java    |   8 +-
 .../apache/doris/monitor/jvm/JvmPauseMonitor.java  |  38 +--
 .../org/apache/doris/monitor/jvm/JvmStats.java     |   4 +-
 .../apache/doris/monitor/unit/ByteSizeUnit.java    |  40 +--
 .../org/apache/doris/monitor/unit/TimeValue.java   |   8 +-
 .../org/apache/doris/mysql/nio/AcceptListener.java |   4 +-
 .../org/apache/doris/mysql/nio/NMysqlChannel.java  |   2 +-
 .../org/apache/doris/mysql/nio/NMysqlServer.java   |   2 +-
 .../org/apache/doris/mysql/nio/ReadListener.java   |   2 +-
 .../org/apache/doris/mysql/privilege/PaloAuth.java |   2 +-
 .../doris/nereids/parser/LogicalPlanBuilder.java   |   1 -
 .../trees/expressions/BinaryExpression.java        |   1 -
 .../org/apache/doris/persist/AlterViewInfo.java    |   4 +-
 .../apache/doris/persist/ColocatePersistInfo.java  |   6 +
 .../org/apache/doris/persist/CreateTableInfo.java  |   6 +
 .../org/apache/doris/persist/OperationType.java    |   2 +-
 .../org/apache/doris/planner/AggregationNode.java  |   6 +-
 .../org/apache/doris/planner/AnalyticPlanner.java  |  12 +-
 .../org/apache/doris/planner/BrokerScanNode.java   |  25 +-
 .../java/org/apache/doris/planner/ColumnBound.java |   8 +-
 .../org/apache/doris/planner/CrossJoinNode.java    |   6 +-
 .../java/org/apache/doris/planner/DataSink.java    |   6 +-
 .../apache/doris/planner/DistributedPlanner.java   | 127 +++++----
 .../org/apache/doris/planner/EmptySetNode.java     |   4 +-
 .../org/apache/doris/planner/ExchangeNode.java     |   4 +-
 .../org/apache/doris/planner/HashJoinNode.java     |  24 +-
 .../doris/planner/ListPartitionPrunerV2.java       |   8 +-
 .../org/apache/doris/planner/PlanFragment.java     |  16 +-
 .../org/apache/doris/planner/PlanFragmentId.java   |   4 +-
 .../java/org/apache/doris/planner/PlanNode.java    |  16 +-
 .../java/org/apache/doris/planner/PlanNodeId.java  |   4 +-
 .../org/apache/doris/planner/PlannerContext.java   |  32 +--
 .../doris/planner/RangePartitionPrunerV2.java      |   8 +-
 .../org/apache/doris/planner/ResultFileSink.java   |   6 +-
 .../org/apache/doris/planner/RuntimeFilter.java    |  55 ++--
 .../doris/planner/RuntimeFilterGenerator.java      |  40 ++-
 .../org/apache/doris/planner/RuntimeFilterId.java  |   4 +-
 .../java/org/apache/doris/planner/ScanNode.java    |   1 +
 .../org/apache/doris/planner/SetOperationNode.java | 121 ++++-----
 .../java/org/apache/doris/qe/AuditLogBuilder.java  |  10 +-
 .../java/org/apache/doris/qe/QeProcessorImpl.java  |   7 +-
 .../org/apache/doris/qe/QueryStateException.java   |   2 +-
 .../java/org/apache/doris/qe/StmtExecutor.java     |   4 +-
 .../org/apache/doris/qe/cache/CacheAnalyzer.java   |   4 +-
 .../apache/doris/qe/cache/CacheCoordinator.java    |   4 +-
 .../java/org/apache/doris/qe/cache/CacheProxy.java |   8 +-
 .../org/apache/doris/qe/cache/PartitionRange.java  |   4 +-
 .../org/apache/doris/qe/cache/RowBatchBuilder.java |   6 +-
 .../main/java/org/apache/doris/resource/Tag.java   |   4 +-
 .../doris/rewrite/BetweenToCompoundRule.java       |   4 +-
 .../doris/rewrite/CompoundPredicateWriteRule.java  |  36 ++-
 .../org/apache/doris/rewrite/ExprRewriter.java     |  46 ++--
 .../rewrite/NormalizeBinaryPredicatesRule.java     |  12 +-
 .../doris/rewrite/RewriteBinaryPredicatesRule.java |   4 +-
 .../doris/rewrite/RewriteDateLiteralRule.java      |   4 +-
 .../org/apache/doris/service/FrontendOptions.java  |   8 +-
 .../apache/doris/statistics/BaseStatsDerive.java   |   4 +-
 .../org/apache/doris/statistics/DeriveFactory.java |   8 +-
 .../main/java/org/apache/doris/system/Backend.java |   6 +
 .../doris/task/UpdateTabletMetaInfoTask.java       |   2 +
 .../java/org/apache/doris/alter/AlterTest.java     |  36 +--
 .../org/apache/doris/alter/RollupJobV2Test.java    |   9 +-
 .../doris/analysis/AdminShowReplicaTest.java       |   4 +-
 .../doris/analysis/CreateDataSyncJobStmtTest.java  |   7 +-
 .../analysis/CreateMaterializedViewStmtTest.java   |   7 +-
 .../org/apache/doris/analysis/DateLiteralTest.java |   2 +-
 .../analysis/TableNameComparedLowercaseTest.java   |   4 +-
 .../analysis/TableNameStoredLowercaseTest.java     |   4 +-
 .../org/apache/doris/backup/BackupJobTest.java     |   6 +-
 .../apache/doris/catalog/CreateEncryptKeyTest.java |   2 +-
 .../apache/doris/catalog/CreateFunctionTest.java   |  10 +-
 .../apache/doris/catalog/CreateTableLikeTest.java  | 296 +++++++++------------
 .../org/apache/doris/catalog/DatabaseTest.java     |   2 +-
 .../doris/catalog/DynamicPartitionTableTest.java   |   2 +-
 .../java/org/apache/doris/catalog/FakeEditLog.java |   2 +-
 .../org/apache/doris/clone/DiskRebalanceTest.java  |   2 +-
 .../java/org/apache/doris/clone/RebalanceTest.java |   2 +-
 .../doris/clone/TabletRepairAndBalanceTest.java    |   2 +-
 .../doris/clone/TabletReplicaTooSlowTest.java      |   2 +-
 .../TwoDimensionalGreedyRebalanceAlgoTest.java     |  12 +-
 .../doris/common/{CIDRTest.java => CidrTest.java}  |   2 +-
 .../org/apache/doris/common/GenericPoolTest.java   |  10 +-
 .../doris/load/loadv2/SparkRepositoryTest.java     |  12 +-
 .../doris/load/update/UpdateStmtExecutorTest.java  |  10 +-
 .../org/apache/doris/mysql/privilege/AuthTest.java |   2 +
 .../persist/BatchModifyPartitionsInfoTest.java     |  22 +-
 .../planner/MaterializedViewFunctionTest.java      | 210 +++++++--------
 .../java/org/apache/doris/planner/PlannerTest.java |   4 +-
 .../apache/doris/planner/ResourceTagQueryTest.java |   2 +-
 .../doris/planner/RuntimeFilterGeneratorTest.java  |  10 +-
 .../apache/doris/planner/UpdatePlannerTest.java    |  36 +--
 .../java/org/apache/doris/qe/CoordinatorTest.java  |  24 +-
 .../org/apache/doris/qe/PartitionCacheTest.java    |  14 +-
 .../org/apache/doris/qe/QueryDetailQueueTest.java  |  12 +-
 .../org/apache/doris/service/ExecuteEnvTest.java   |   8 +-
 .../transaction/DatabaseTransactionMgrTest.java    |  12 +-
 .../transaction/FakeTransactionIDGenerator.java    |   2 +-
 .../org/apache/doris/utframe/AnotherDemoTest.java  |   2 +-
 .../doris/utframe/DemoMultiBackendsTest.java       |   2 +-
 .../apache/doris/utframe/MockedBackendFactory.java |  12 +-
 .../apache/doris/utframe/TestWithFeService.java    |  48 ++--
 .../org/apache/doris/utframe/UtFrameUtils.java     |  48 ++--
 .../java/org/apache/doris/udf/UdfExecutor.java     | 266 +++++++++---------
 .../apache/doris/load/loadv2/dpp/DppColumns.java   |   8 +-
 .../org/apache/doris/load/loadv2/dpp/SparkDpp.java |   2 +
 212 files changed, 2689 insertions(+), 2051 deletions(-)

diff --git a/build-support/IntelliJ-code-format.xml b/build-support/IntelliJ-code-format.xml
index 04c4e7a681..eea3822f66 100644
--- a/build-support/IntelliJ-code-format.xml
+++ b/build-support/IntelliJ-code-format.xml
@@ -66,6 +66,7 @@ under the License.
     <option name="THROWS_KEYWORD_WRAP" value="1" />
     <option name="METHOD_CALL_CHAIN_WRAP" value="1" />
     <option name="BINARY_OPERATION_WRAP" value="1" />
+    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
     <option name="TERNARY_OPERATION_WRAP" value="1" />
     <option name="FOR_STATEMENT_WRAP" value="1" />
     <option name="ARRAY_INITIALIZER_WRAP" value="1" />
@@ -92,4 +93,3 @@ under the License.
     </arrangement>
   </codeStyleSettings>
 </code_scheme>
-
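
For context, the new BINARY_OPERATION_SIGN_ON_NEXT_LINE option tells the IntelliJ formatter to place the operator at the start of the continuation line when a binary expression wraps. A minimal sketch of the resulting layout (the identifiers below are hypothetical, not from this patch):

    // with BINARY_OPERATION_SIGN_ON_NEXT_LINE = true, the operator leads the wrapped line:
    if (isLongEnoughToWrap
            && alsoNeedsWrapping) {
        doWork();
    }
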
diff --git a/fe/check/checkstyle/checkstyle.xml b/fe/check/checkstyle/checkstyle.xml
index c367452c90..1bd1436b66 100644
--- a/fe/check/checkstyle/checkstyle.xml
+++ b/fe/check/checkstyle/checkstyle.xml
@@ -36,6 +36,10 @@ under the License.
         <property name="file" value="fe/check/checkstyle/suppressions.xml"/>
         <property name="optional" value="true"/>
     </module>
+    <module name="SuppressWithPlainTextCommentFilter">
+        <property name="offCommentFormat" value="CHECKSTYLE OFF"/>
+        <property name="onCommentFormat" value="CHECKSTYLE ON"/>
+    </module>
 
     <module name="FileTabCharacter">
         <property name="eachLine" value="true"/>
@@ -69,6 +73,10 @@ under the License.
     </module>
 
     <module name="TreeWalker">
+        <!-- filter -->
+        <module name="SuppressWithNearbyCommentFilter">
+            <property name="commentFormat" value="CHECKSTYLE IGNORE THIS LINE"/>
+        </module>
         <!-- Annotations -->
         <module name="AnnotationLocation">
             <property name="id" value="AnnotationLocationMostCases"/>
@@ -100,6 +108,7 @@ under the License.
         <module name="NeedBraces">
             <property name="tokens"
                       value="LITERAL_DO, LITERAL_ELSE, LITERAL_FOR, LITERAL_IF, LITERAL_WHILE"/>
+            <property name="severity" value="error"/>
         </module>
         <module name="LeftCurly">
             <property name="tokens"
@@ -135,7 +144,9 @@ under the License.
 
         <!-- Coding -->
         <module name="DeclarationOrder"/>
-        <module name="FallThrough"/>
+        <module name="FallThrough">
+            <property name="severity" value="error"/>
+        </module>
         <module name="IllegalTokenText">
             <property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
             <property name="format"
@@ -143,9 +154,13 @@ under the License.
             <property name="message"
                       value="Consider using special escape sequence instead of octal value or Unicode escaped value."/>
         </module>
-        <module name="MissingSwitchDefault"/>
+        <module name="MissingSwitchDefault">
+            <property name="severity" value="error"/>
+        </module>
         <module name="MultipleVariableDeclarations"/>
-        <module name="NoFinalizer"/>
+        <module name="NoFinalizer">
+            <property name="severity" value="error"/>
+        </module>
         <module name="OneStatementPerLine"/>
         <module name="OverloadMethodsDeclarationOrder"/>
         <module name="StringLiteralEquality"/>
@@ -249,20 +264,22 @@ under the License.
         <!-- Naming Conventions -->
         <module name="AbbreviationAsWordInName">
             <property name="ignoreFinal" value="false"/>
-            <property name="allowedAbbreviationLength" value="3"/>
+            <property name="allowedAbbreviationLength" value="4"/>
             <property name="tokens"
                       value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, ANNOTATION_DEF, ANNOTATION_FIELD_DEF,
                     PARAMETER_DEF, VARIABLE_DEF, METHOD_DEF, PATTERN_VARIABLE_DEF, RECORD_DEF,
                     RECORD_COMPONENT_DEF"/>
+            <property name="severity" value="error"/>
         </module>
         <module name="CatchParameterName">
-            <property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <property name="format" value="^([a-z0-9][a-zA-Z0-9]*)?$"/>
             <property name="severity" value="error"/>
             <message key="name.invalidPattern"
                      value="Catch parameter name ''{0}'' must match pattern ''{1}''."/>
         </module>
         <module name="ClassTypeParameterName">
             <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*$)"/>
+            <property name="severity" value="error"/>
             <message key="name.invalidPattern"
                      value="Class type name ''{0}'' must match pattern ''{1}''."/>
         </module>
@@ -273,22 +290,26 @@ under the License.
                      value="Interface type name ''{0}'' must match pattern ''{1}''."/>
         </module>
         <module name="LambdaParameterName">
-            <property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <property name="format" value="^([a-z][a-zA-Z0-9]*)?$"/>
+            <property name="severity" value="error"/>
             <message key="name.invalidPattern"
                      value="Lambda parameter name ''{0}'' must match pattern ''{1}''."/>
         </module>
         <module name="LocalVariableName">
-            <property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <property name="format" value="^([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <property name="severity" value="error"/>
             <message key="name.invalidPattern"
                      value="Local variable name ''{0}'' must match pattern ''{1}''."/>
         </module>
         <module name="MemberName">
-            <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9]*$"/>
+            <property name="format" value="^([a-z][a-zA-Z0-9]*)?$"/>
+            <property name="severity" value="error"/>
             <message key="name.invalidPattern"
                      value="Member name ''{0}'' must match pattern ''{1}''."/>
         </module>
         <module name="MethodName">
             <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9_]*$"/>
+            <property name="severity" value="error"/>
             <message key="name.invalidPattern"
                      value="Method name ''{0}'' must match pattern ''{1}''."/>
         </module>
@@ -305,7 +326,8 @@ under the License.
                      value="Package name ''{0}'' must match pattern ''{1}''."/>
         </module>
         <module name="ParameterName">
-            <property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <property name="format" value="^([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <property name="severity" value="error"/>
             <message key="name.invalidPattern"
                      value="Parameter name ''{0}'' must match pattern ''{1}''."/>
         </module>
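
The two filters added above enable comment-based suppression: SuppressWithPlainTextCommentFilter disables all checks between CHECKSTYLE OFF and CHECKSTYLE ON markers, while SuppressWithNearbyCommentFilter (inside TreeWalker) silences violations only on the line that carries the CHECKSTYLE IGNORE THIS LINE comment. A minimal sketch of how the rest of this patch uses them (the snake_case field is a hypothetical example, not taken from the diff):

    short B = 128; // CHECKSTYLE IGNORE THIS LINE  (suppresses checks on this line only)

    // CHECKSTYLE OFF
    int legacy_snake_case_field = 0;  // every check is skipped between the OFF/ON markers
    // CHECKSTYLE ON
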
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/BitmapValue.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/BitmapValue.java
index fe857bf5b3..bef2fba4f4 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/BitmapValue.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/BitmapValue.java
@@ -58,7 +58,7 @@ public class BitmapValue {
     }
 
     public void add(long value) {
-        switch (bitmapType) {
+        switch (bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 singleValue = value;
                 bitmapType = SINGLE_VALUE;
@@ -95,7 +95,7 @@ public class BitmapValue {
     }
 
     public long cardinality() {
-        switch (bitmapType) {
+        switch (bitmapType) {  // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 return 0;
             case SINGLE_VALUE:
@@ -107,7 +107,7 @@ public class BitmapValue {
     }
 
     public void serialize(DataOutput output) throws IOException {
-        switch (bitmapType) {
+        switch (bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 output.writeByte(EMPTY);
                 break;
@@ -155,12 +155,12 @@ public class BitmapValue {
 
     // In-place bitwise AND (intersection) operation. The current bitmap is modified.
     public void and(BitmapValue other) {
-        switch (other.bitmapType) {
+        switch (other.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 clear();
                 break;
             case SINGLE_VALUE:
-                switch (this.bitmapType) {
+                switch (this.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case EMPTY:
                         break;
                     case SINGLE_VALUE:
@@ -180,7 +180,7 @@ public class BitmapValue {
                 }
                 break;
             case BITMAP_VALUE:
-                switch (this.bitmapType) {
+                switch (this.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case EMPTY:
                         break;
                     case SINGLE_VALUE:
@@ -199,14 +199,14 @@ public class BitmapValue {
 
     // In-place bitwise OR (union) operation. The current bitmap is modified.
     public void or(BitmapValue other) {
-        switch (other.bitmapType) {
+        switch (other.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 break;
             case SINGLE_VALUE:
                 add(other.singleValue);
                 break;
             case BITMAP_VALUE:
-                switch (this.bitmapType) {
+                switch (this.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case EMPTY:
                         // deep copy the bitmap in case of multi-rollups update the bitmap repeatedly
                         this.bitmap = new Roaring64Map();
@@ -228,7 +228,7 @@ public class BitmapValue {
     }
 
     public void remove(long value){
-        switch (this.bitmapType){
+        switch (this.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 break;
             case SINGLE_VALUE:
@@ -245,14 +245,14 @@ public class BitmapValue {
 
     //In-place bitwise ANDNOT (difference) operation. The current bitmap is modified
     public void not(BitmapValue other) {
-        switch (other.bitmapType) {
+        switch (other.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 break;
             case SINGLE_VALUE:
                 remove(other.singleValue);
                 break;
             case BITMAP_VALUE:
-                switch (this.bitmapType) {
+                switch (this.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case EMPTY:
                         break;
                     case SINGLE_VALUE:
@@ -271,11 +271,11 @@ public class BitmapValue {
 
     //In-place bitwise XOR (symmetric difference) operation. The current bitmap is modified
     public void xor(BitmapValue other) {
-        switch (other.bitmapType) {
+        switch (other.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 break;
             case SINGLE_VALUE:
-                switch (this.bitmapType){
+                switch (this.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case EMPTY:
                         add(other.singleValue);
                         break;
@@ -297,7 +297,7 @@ public class BitmapValue {
                 }
                 break;
             case BITMAP_VALUE:
-                switch (this.bitmapType) {
+                switch (this.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case EMPTY:
                         this.bitmap = other.bitmap;
                         this.bitmapType = BITMAP_VALUE;
@@ -325,7 +325,7 @@ public class BitmapValue {
         if (this.bitmapType != other.bitmapType) {
             return false;
         }
-        switch (other.bitmapType) {
+        switch (other.bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 ret = true;
                 break;
@@ -348,7 +348,7 @@ public class BitmapValue {
     // TODO(wb): keep getSizeInBytes consistent with be and refactor roaring
     public long getSizeInBytes() {
         long size = 0;
-        switch (bitmapType) {
+        switch (bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 size = 1;
                 break;
@@ -368,7 +368,7 @@ public class BitmapValue {
     @Override
     public String toString() {
         String toStringStr = "{}";
-        switch (bitmapType) {
+        switch (bitmapType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case EMPTY:
                 break;
             case SINGLE_VALUE:
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/Codec.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/Codec.java
index 05dada1601..2d783a3f38 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/Codec.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/Codec.java
@@ -26,7 +26,7 @@ public class Codec {
     // not support encode negative value now
     public static void encodeVarint64(long source, DataOutput out) throws IOException {
         assert source >= 0;
-        short B = 128;
+        short B = 128; // CHECKSTYLE IGNORE THIS LINE
 
         while (source >= B) {
             out.write((int) (source & (B - 1) | B));
@@ -39,7 +39,7 @@ public class Codec {
     public static long decodeVarint64(DataInput in) throws IOException {
         long result = 0;
         int shift = 0;
-        short B = 128;
+        short B = 128; // CHECKSTYLE IGNORE THIS LINE
 
         while (true) {
             int oneByte = in.readUnsignedByte();
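
For reference, encodeVarint64 writes the value seven bits at a time, least-significant group first, setting the high bit (B = 128) on every byte except the last; decodeVarint64 reverses this. A worked example, assuming a DataOutput named out and a DataInput named in:

    // 300 = 0b1_0010_1100
    // first byte:  (300 & 127) | 128 = 44 | 128 = 0xAC  (continuation bit set)
    // second byte:  300 >> 7         = 2         = 0x02  (final byte, no continuation bit)
    Codec.encodeVarint64(300, out);      // writes 0xAC 0x02
    long v = Codec.decodeVarint64(in);   // reads 0xAC 0x02 back and returns 300
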
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/FastByteArrayInputStream.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/FastByteArrayInputStream.java
index c34ccf02e1..0375068f48 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/FastByteArrayInputStream.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/FastByteArrayInputStream.java
@@ -53,8 +53,9 @@ public class FastByteArrayInputStream extends InputStream {
     }
 
     public final int read(byte[] b, int off, int len) {
-        if (pos >= count)
+        if (pos >= count) {
             return -1;
+        }
 
         if ((pos + len) > count) {
             len = (count - pos);
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/Hll.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/Hll.java
index 36385072a3..ed02b0a489 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/Hll.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/Hll.java
@@ -116,7 +116,7 @@ public class Hll {
     }
 
     public void update(long hashValue) {
-        switch (this.type) {
+        switch (this.type) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case HLL_DATA_EMPTY:
                 hashSet.add(hashValue);
                 type = HLL_DATA_EXPLICIT;
@@ -128,7 +128,7 @@ public class Hll {
                 }
                 convertExplicitToRegister();
                 type = HLL_DATA_FULL;
-            case HLL_DATA_SPARSE:
+            case HLL_DATA_SPARSE: // CHECKSTYLE IGNORE THIS LINE: fall through
             case HLL_DATA_FULL:
                 updateRegisters(hashValue);
                 break;
@@ -139,10 +139,10 @@ public class Hll {
         if (other.type == HLL_DATA_EMPTY) {
             return;
         }
-        switch (this.type) {
+        switch (this.type) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case HLL_DATA_EMPTY:
                 this.type = other.type;
-                switch (other.type) {
+                switch (other.type) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case HLL_DATA_EXPLICIT:
                         this.hashSet.addAll(other.hashSet);
                         break;
@@ -154,7 +154,7 @@ public class Hll {
                 }
                 break;
             case HLL_DATA_EXPLICIT:
-                switch (other.type) {
+                switch (other.type) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case HLL_DATA_EXPLICIT:
                         this.hashSet.addAll(other.hashSet);
                         if (this.hashSet.size() > HLL_EXPLICLIT_INT64_NUM) {
@@ -172,7 +172,7 @@ public class Hll {
                 break;
             case HLL_DATA_SPARSE:
             case HLL_DATA_FULL:
-                switch (other.type) {
+                switch (other.type) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                     case HLL_DATA_EXPLICIT:
                         for (long value : other.hashSet) {
                             update(value);
@@ -188,7 +188,7 @@ public class Hll {
     }
 
     public void serialize(DataOutput output) throws IOException {
-        switch (type) {
+        switch (type) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case HLL_DATA_EMPTY:
                 output.writeByte(type);
                 break;
@@ -363,20 +363,20 @@ public class Hll {
         }
 
         final int index = (nblocks << 3);
-        switch (length - index) {
+        switch (length - index) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case 7:
                 h ^= ((long) data[index + 6] & 0xff) << 48;
-            case 6:
+            case 6: // CHECKSTYLE IGNORE THIS LINE: fall through
                 h ^= ((long) data[index + 5] & 0xff) << 40;
-            case 5:
+            case 5: // CHECKSTYLE IGNORE THIS LINE: fall through
                 h ^= ((long) data[index + 4] & 0xff) << 32;
-            case 4:
+            case 4: // CHECKSTYLE IGNORE THIS LINE: fall through
                 h ^= ((long) data[index + 3] & 0xff) << 24;
-            case 3:
+            case 3: // CHECKSTYLE IGNORE THIS LINE: fall through
                 h ^= ((long) data[index + 2] & 0xff) << 16;
-            case 2:
+            case 2: // CHECKSTYLE IGNORE THIS LINE: fall through
                 h ^= ((long) data[index + 1] & 0xff) << 8;
-            case 1:
+            case 1: // CHECKSTYLE IGNORE THIS LINE: fall through
                 h ^= ((long) data[index] & 0xff);
                 h *= M64;
         }
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/Text.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/Text.java
index 8309e245ba..a8977e5ed7 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/Text.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/Text.java
@@ -469,7 +469,7 @@ public class Text implements Writable {
         while (count < start + len) {
             int aByte = ((int) utf8[count] & 0xFF);
 
-            switch (state) {
+            switch (state) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case LEAD_BYTE:
                 leadByte = aByte;
                 length = bytesFromUTF8[aByte];
@@ -520,8 +520,9 @@ public class Text implements Writable {
                 }
                 // falls through to regular trail-byte test!!
             case TRAIL_BYTE:
-                if (aByte < 0x80 || aByte > 0xBF)
+                if (aByte < 0x80 || aByte > 0xBF) {
                     throw new MalformedInputException(count);
+                }
                 if (--length == 0) {
                     state = LEAD_BYTE;
                 } else {
@@ -570,28 +571,35 @@ public class Text implements Writable {
         byte b = bytes.get();
         bytes.reset();
         int extraBytesToRead = bytesFromUTF8[(b & 0xFF)];
-        if (extraBytesToRead < 0)
+        if (extraBytesToRead < 0) {
             return -1; // trailing byte!
+        }
         int ch = 0;
 
-        switch (extraBytesToRead) {
+        switch (extraBytesToRead) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
         case 5:
             ch += (bytes.get() & 0xFF);
             ch <<= 6; /* remember, illegal UTF-8 */
+            // CHECKSTYLE IGNORE THIS LINE: fall through
         case 4:
             ch += (bytes.get() & 0xFF);
             ch <<= 6; /* remember, illegal UTF-8 */
+            // CHECKSTYLE IGNORE THIS LINE: fall through
         case 3:
             ch += (bytes.get() & 0xFF);
             ch <<= 6;
+            // CHECKSTYLE IGNORE THIS LINE: fall through
         case 2:
             ch += (bytes.get() & 0xFF);
             ch <<= 6;
+            // CHECKSTYLE IGNORE THIS LINE: fall through
         case 1:
             ch += (bytes.get() & 0xFF);
             ch <<= 6;
+            // CHECKSTYLE IGNORE THIS LINE: fall through
         case 0:
             ch += (bytes.get() & 0xFF);
+            // CHECKSTYLE IGNORE THIS LINE: fall through, missing switch default
         }
         ch -= offsetsFromUTF8[extraBytesToRead];
 
diff --git a/fe/fe-common/src/test/java/org/apache/doris/common/io/BitmapValueTest.java b/fe/fe-common/src/test/java/org/apache/doris/common/io/BitmapValueTest.java
index d30e734c91..8c802da3ee 100644
--- a/fe/fe-common/src/test/java/org/apache/doris/common/io/BitmapValueTest.java
+++ b/fe/fe-common/src/test/java/org/apache/doris/common/io/BitmapValueTest.java
@@ -102,112 +102,112 @@ public class BitmapValueTest {
     public void testBitmapValueAnd() {
         // empty and empty
         BitmapValue bitmapValue1 = new BitmapValue();
-        BitmapValue bitmapValue1_1 = new BitmapValue();
-        bitmapValue1.and(bitmapValue1_1);
+        BitmapValue bitmapValue1Dot1 = new BitmapValue();
+        bitmapValue1.and(bitmapValue1Dot1);
         Assert.assertTrue(bitmapValue1.getBitmapType() == BitmapValue.EMPTY);
         Assert.assertTrue(bitmapValue1.cardinality() == 0);
 
         // empty and single value
         BitmapValue bitmapValue2 = new BitmapValue();
-        BitmapValue bitmapValue2_1 = new BitmapValue();
-        bitmapValue2_1.add(1);
-        bitmapValue2.and(bitmapValue2_1);
+        BitmapValue bitmapValue2Dot1 = new BitmapValue();
+        bitmapValue2Dot1.add(1);
+        bitmapValue2.and(bitmapValue2Dot1);
         Assert.assertTrue(bitmapValue2.getBitmapType() == BitmapValue.EMPTY);
         Assert.assertTrue(bitmapValue2.cardinality() == 0);
 
         // empty and bitmap
         BitmapValue bitmapValue3 = new BitmapValue();
-        BitmapValue bitmapValue3_1 =new BitmapValue();
-        bitmapValue3_1.add(1);
-        bitmapValue3_1.add(2);
-        bitmapValue3.and(bitmapValue3_1);
+        BitmapValue bitmapValue3Dot1 =new BitmapValue();
+        bitmapValue3Dot1.add(1);
+        bitmapValue3Dot1.add(2);
+        bitmapValue3.and(bitmapValue3Dot1);
         Assert.assertTrue(bitmapValue2.getBitmapType() == BitmapValue.EMPTY);
         Assert.assertTrue(bitmapValue3.cardinality() == 0);
 
         // single value and empty
         BitmapValue bitmapValue4 = new BitmapValue();
         bitmapValue4.add(1);
-        BitmapValue bitmapValue4_1 = new BitmapValue();
-        bitmapValue4.and(bitmapValue4_1);
+        BitmapValue bitmapValue4Dot1 = new BitmapValue();
+        bitmapValue4.and(bitmapValue4Dot1);
         Assert.assertTrue(bitmapValue4.getBitmapType() == BitmapValue.EMPTY);
         Assert.assertTrue(bitmapValue4.cardinality() == 0);
 
         // single value and single value
         BitmapValue bitmapValue5 = new BitmapValue();
         bitmapValue5.add(1);
-        BitmapValue bitmapValue5_1 = new BitmapValue();
-        bitmapValue5_1.add(1);
-        bitmapValue5.and(bitmapValue5_1);
+        BitmapValue bitmapValue5Dot1 = new BitmapValue();
+        bitmapValue5Dot1.add(1);
+        bitmapValue5.and(bitmapValue5Dot1);
         Assert.assertTrue(bitmapValue5.getBitmapType() == BitmapValue.SINGLE_VALUE);
         Assert.assertTrue(bitmapValue5.contains(1));
 
         bitmapValue5.clear();
-        bitmapValue5_1.clear();
+        bitmapValue5Dot1.clear();
         bitmapValue5.add(1);
-        bitmapValue5_1.add(2);
-        bitmapValue5.and(bitmapValue5_1);
+        bitmapValue5Dot1.add(2);
+        bitmapValue5.and(bitmapValue5Dot1);
         Assert.assertTrue(bitmapValue5.getBitmapType() == BitmapValue.EMPTY);
 
         // single value and bitmap
         BitmapValue bitmapValue6 = new BitmapValue();
         bitmapValue6.add(1);
-        BitmapValue bitmapValue6_1 = new BitmapValue();
-        bitmapValue6_1.add(1);
-        bitmapValue6_1.add(2);
-        bitmapValue6.and(bitmapValue6_1);
+        BitmapValue bitmapValue6Dot1 = new BitmapValue();
+        bitmapValue6Dot1.add(1);
+        bitmapValue6Dot1.add(2);
+        bitmapValue6.and(bitmapValue6Dot1);
         Assert.assertTrue(bitmapValue6.getBitmapType() == BitmapValue.SINGLE_VALUE);
 
         bitmapValue6.clear();
         bitmapValue6.add(3);
-        bitmapValue6.and(bitmapValue6_1);
+        bitmapValue6.and(bitmapValue6Dot1);
         Assert.assertTrue(bitmapValue6.getBitmapType() == BitmapValue.EMPTY);
 
         // bitmap and empty
         BitmapValue bitmapValue7 = new BitmapValue();
         bitmapValue7.add(1);
         bitmapValue7.add(2);
-        BitmapValue bitmapValue7_1 = new BitmapValue();
-        bitmapValue7.and(bitmapValue7_1);
+        BitmapValue bitmapValue7Dot1 = new BitmapValue();
+        bitmapValue7.and(bitmapValue7Dot1);
         Assert.assertTrue(bitmapValue7.getBitmapType() == BitmapValue.EMPTY);
 
         // bitmap and single value
         BitmapValue bitmapValue8 = new BitmapValue();
         bitmapValue8.add(1);
         bitmapValue8.add(2);
-        BitmapValue bitmapValue8_1 = new BitmapValue();
-        bitmapValue8_1.add(1);
-        bitmapValue8.and(bitmapValue8_1);
+        BitmapValue bitmapValue8Dot1 = new BitmapValue();
+        bitmapValue8Dot1.add(1);
+        bitmapValue8.and(bitmapValue8Dot1);
         Assert.assertTrue(bitmapValue8.getBitmapType() == BitmapValue.SINGLE_VALUE);
 
         bitmapValue8.clear();
         bitmapValue8.add(2);
         bitmapValue8.add(3);
-        bitmapValue8.and(bitmapValue8_1);
+        bitmapValue8.and(bitmapValue8Dot1);
         Assert.assertTrue(bitmapValue8.getBitmapType() == BitmapValue.EMPTY);
 
         // bitmap and bitmap
         BitmapValue bitmapValue9 = new BitmapValue();
         bitmapValue9.add(1);
         bitmapValue9.add(2);
-        BitmapValue bitmapValue9_1 = new BitmapValue();
-        bitmapValue9_1.add(2);
-        bitmapValue9_1.add(3);
-        bitmapValue9.and(bitmapValue9_1);
+        BitmapValue bitmapValue9Dot1 = new BitmapValue();
+        bitmapValue9Dot1.add(2);
+        bitmapValue9Dot1.add(3);
+        bitmapValue9.and(bitmapValue9Dot1);
         Assert.assertTrue(bitmapValue9.getBitmapType() == BitmapValue.SINGLE_VALUE);
 
         bitmapValue9.clear();
         bitmapValue9.add(4);
         bitmapValue9.add(5);
-        bitmapValue9.and(bitmapValue9_1);
+        bitmapValue9.and(bitmapValue9Dot1);
         Assert.assertTrue(bitmapValue9.getBitmapType() == BitmapValue.EMPTY);
 
         bitmapValue9.clear();
         bitmapValue9.add(2);
         bitmapValue9.add(3);
         bitmapValue9.add(4);
-        bitmapValue9.and(bitmapValue9_1);
+        bitmapValue9.and(bitmapValue9Dot1);
         Assert.assertTrue(bitmapValue9.getBitmapType() == BitmapValue.BITMAP_VALUE);
-        Assert.assertTrue(bitmapValue9.equals(bitmapValue9_1));
+        Assert.assertTrue(bitmapValue9.equals(bitmapValue9Dot1));
 
     }
 
@@ -215,77 +215,77 @@ public class BitmapValueTest {
     public void testBitmapValueOr() {
         // empty or empty
         BitmapValue bitmapValue1 = new BitmapValue();
-        BitmapValue bitmapValue1_1 = new BitmapValue();
-        bitmapValue1.or(bitmapValue1_1);
+        BitmapValue bitmapValue1Dot1 = new BitmapValue();
+        bitmapValue1.or(bitmapValue1Dot1);
         Assert.assertTrue(bitmapValue1.getBitmapType() == BitmapValue.EMPTY);
 
         // empty or single value
         BitmapValue bitmapValue2 = new BitmapValue();
-        BitmapValue bitmapValue2_1 = new BitmapValue();
-        bitmapValue2_1.add(1);
-        bitmapValue2.or(bitmapValue2_1);
+        BitmapValue bitmapValue2Dot1 = new BitmapValue();
+        bitmapValue2Dot1.add(1);
+        bitmapValue2.or(bitmapValue2Dot1);
         Assert.assertTrue(bitmapValue2.getBitmapType() == BitmapValue.SINGLE_VALUE);
 
         // empty or bitmap
         BitmapValue bitmapValue3 = new BitmapValue();
-        BitmapValue bitmapValue3_1 = new BitmapValue();
-        bitmapValue3_1.add(1);
-        bitmapValue3_1.add(2);
-        bitmapValue3.or(bitmapValue3_1);
+        BitmapValue bitmapValue3Dot1 = new BitmapValue();
+        bitmapValue3Dot1.add(1);
+        bitmapValue3Dot1.add(2);
+        bitmapValue3.or(bitmapValue3Dot1);
         Assert.assertTrue(bitmapValue3.getBitmapType() == BitmapValue.BITMAP_VALUE);
 
         // single or and empty
         BitmapValue bitmapValue4 = new BitmapValue();
-        BitmapValue bitmapValue4_1 = new BitmapValue();
+        BitmapValue bitmapValue4Dot1 = new BitmapValue();
         bitmapValue4.add(1);
-        bitmapValue4.or(bitmapValue4_1);
+        bitmapValue4.or(bitmapValue4Dot1);
         Assert.assertTrue(bitmapValue4.getBitmapType() == BitmapValue.SINGLE_VALUE);
 
         // single or and single value
         BitmapValue bitmapValue5 = new BitmapValue();
-        BitmapValue bitmapValue5_1 = new BitmapValue();
+        BitmapValue bitmapValue5Dot1 = new BitmapValue();
         bitmapValue5.add(1);
-        bitmapValue5_1.add(1);
-        bitmapValue5.or(bitmapValue5_1);
+        bitmapValue5Dot1.add(1);
+        bitmapValue5.or(bitmapValue5Dot1);
         Assert.assertTrue(bitmapValue5.getBitmapType() == BitmapValue.SINGLE_VALUE);
 
         bitmapValue5.clear();
         bitmapValue5.add(2);
-        bitmapValue5.or(bitmapValue5_1);
+        bitmapValue5.or(bitmapValue5Dot1);
         Assert.assertTrue(bitmapValue5.getBitmapType() == BitmapValue.BITMAP_VALUE);
 
         // single or and bitmap
         BitmapValue bitmapValue6 = new BitmapValue();
-        BitmapValue bitmapValue6_1 = new BitmapValue();
+        BitmapValue bitmapValue6Dot1 = new BitmapValue();
         bitmapValue6.add(1);
-        bitmapValue6_1.add(1);
-        bitmapValue6_1.add(2);
-        bitmapValue6.or(bitmapValue6_1);
+        bitmapValue6Dot1.add(1);
+        bitmapValue6Dot1.add(2);
+        bitmapValue6.or(bitmapValue6Dot1);
         Assert.assertTrue(bitmapValue6.getBitmapType() == BitmapValue.BITMAP_VALUE);
 
         // bitmap or empty
         BitmapValue bitmapValue7 = new BitmapValue();
         bitmapValue7.add(1);
         bitmapValue7.add(2);
-        BitmapValue bitmapValue7_1 =new BitmapValue();
-        bitmapValue7.or(bitmapValue7_1);
+        BitmapValue bitmapValue7Dot1 =new BitmapValue();
+        bitmapValue7.or(bitmapValue7Dot1);
         Assert.assertTrue(bitmapValue7.getBitmapType() == BitmapValue.BITMAP_VALUE);
 
         // bitmap or single value
         BitmapValue bitmapValue8 = new BitmapValue();
         bitmapValue8.add(1);
         bitmapValue8.add(2);
-        BitmapValue bitmapValue8_1 =new BitmapValue();
-        bitmapValue8_1.add(1);
-        bitmapValue8.or(bitmapValue8_1);
+        BitmapValue bitmapValue8Dot1 =new BitmapValue();
+        bitmapValue8Dot1.add(1);
+        bitmapValue8.or(bitmapValue8Dot1);
         Assert.assertTrue(bitmapValue8.getBitmapType() == BitmapValue.BITMAP_VALUE);
 
         // bitmap or bitmap
         BitmapValue bitmapValue9 = new BitmapValue();
         bitmapValue9.add(1);
         bitmapValue9.add(2);
-        BitmapValue bitmapValue9_1 =new BitmapValue();
-        bitmapValue9.or(bitmapValue9_1);
+        BitmapValue bitmapValue9Dot1 =new BitmapValue();
+        bitmapValue9.or(bitmapValue9Dot1);
         Assert.assertTrue(bitmapValue9.getBitmapType() == BitmapValue.BITMAP_VALUE);
     }
 
diff --git a/fe/fe-common/src/test/java/org/apache/doris/common/io/HllTest.java b/fe/fe-common/src/test/java/org/apache/doris/common/io/HllTest.java
index eecf3ecb94..78ba33b692 100644
--- a/fe/fe-common/src/test/java/org/apache/doris/common/io/HllTest.java
+++ b/fe/fe-common/src/test/java/org/apache/doris/common/io/HllTest.java
@@ -38,7 +38,7 @@ public class HllTest {
     }
 
     @Test
-    public void HllBasicTest() throws IOException {
+    public void hllBasicTest() throws IOException {
         // test empty
         Hll emptyHll = new Hll();
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/AlterHandler.java b/fe/fe-core/src/main/java/org/apache/doris/alter/AlterHandler.java
index 7cdd1d3a5e..23a66a3678 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/AlterHandler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/AlterHandler.java
@@ -76,8 +76,8 @@ public abstract class AlterHandler extends MasterDaemon {
         this(name, FeConstants.default_scheduler_interval_millisecond);
     }
 
-    public AlterHandler(String name, int scheduler_interval_millisecond) {
-        super(name, scheduler_interval_millisecond);
+    public AlterHandler(String name, int schedulerIntervalMillisecond) {
+        super(name, schedulerIntervalMillisecond);
     }
 
     protected void addAlterJobV2(AlterJobV2 alterJob) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/MaterializedViewHandler.java b/fe/fe-core/src/main/java/org/apache/doris/alter/MaterializedViewHandler.java
index a2d04a12c6..4d7eb8f5a5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/MaterializedViewHandler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/MaterializedViewHandler.java
@@ -1050,8 +1050,9 @@ public class MaterializedViewHandler extends AlterHandler {
             if (cancelAlterTableStmt.getAlterJobIdList() != null) {
                 for (Long jobId : cancelAlterTableStmt.getAlterJobIdList()) {
                     AlterJobV2 alterJobV2 = getUnfinishedAlterJobV2ByJobId(jobId);
-                    if (alterJobV2 == null)
+                    if (alterJobV2 == null) {
                         continue;
+                    }
                     rollupJobV2List.add(getUnfinishedAlterJobV2ByJobId(jobId));
                 }
             } else {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfo.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfo.java
index 6040850916..988dff5584 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfo.java
@@ -81,41 +81,41 @@ public final class AggregateInfo extends AggregateInfoBase {
     };
 
     // created by createMergeAggInfo()
-    private AggregateInfo mergeAggInfo_;
+    private AggregateInfo mergeAggInfo;
 
     // created by createDistinctAggInfo()
-    private AggregateInfo secondPhaseDistinctAggInfo_;
+    private AggregateInfo secondPhaseDistinctAggInfo;
 
-    private final AggPhase aggPhase_;
+    private final AggPhase aggPhase;
 
     // Map from all grouping and aggregate exprs to a SlotRef referencing the corresp. slot
     // in the intermediate tuple. Identical to outputTupleSmap_ if no aggregateExpr has an
     // output type that is different from its intermediate type.
-    protected ExprSubstitutionMap intermediateTupleSmap_ = new ExprSubstitutionMap();
+    protected ExprSubstitutionMap intermediateTupleSmap = new ExprSubstitutionMap();
 
     // Map from all grouping and aggregate exprs to a SlotRef referencing the corresp. slot
     // in the output tuple.
-    protected ExprSubstitutionMap outputTupleSmap_ = new ExprSubstitutionMap();
+    protected ExprSubstitutionMap outputTupleSmap = new ExprSubstitutionMap();
 
     // Map from slots of outputTupleSmap_ to the corresponding slot in
     // intermediateTupleSmap_.
-    protected ExprSubstitutionMap outputToIntermediateTupleSmap_ =
+    protected ExprSubstitutionMap outputToIntermediateTupleSmap =
             new ExprSubstitutionMap();
 
     // if set, a subset of groupingExprs_; set and used during planning
-    private List<Expr> partitionExprs_;
+    private List<Expr> partitionExprs;
 
     // indices into aggregateExprs for those that need to be materialized;
     // shared between this, mergeAggInfo and secondPhaseDistinctAggInfo
-    private ArrayList<Integer> materializedAggregateSlots_ = Lists.newArrayList();
+    private ArrayList<Integer> materializedAggregateSlots = Lists.newArrayList();
     // if true, this AggregateInfo is the first phase of a 2-phase DISTINCT computation
     private boolean isDistinctAgg = false;
     // If true, the sql has MultiDistinct
-    private boolean isMultiDistinct_;
+    private boolean isMultiDistinct;
 
     // the multi distinct's begin pos  and end pos in groupby exprs
-    private ArrayList<Integer> firstIdx_ = Lists.newArrayList();
-    private ArrayList<Integer> lastIdx_ = Lists.newArrayList();
+    private ArrayList<Integer> firstIdx = Lists.newArrayList();
+    private ArrayList<Integer> lastIdx = Lists.newArrayList();
 
     // C'tor creates copies of groupingExprs and aggExprs.
     private AggregateInfo(ArrayList<Expr> groupingExprs,
@@ -126,8 +126,8 @@ public final class AggregateInfo extends AggregateInfoBase {
     private AggregateInfo(ArrayList<Expr> groupingExprs,
                           ArrayList<FunctionCallExpr> aggExprs, AggPhase aggPhase, boolean isMultiDistinct)  {
         super(groupingExprs, aggExprs);
-        aggPhase_ = aggPhase;
-        isMultiDistinct_ = isMultiDistinct;
+        this.aggPhase = aggPhase;
+        this.isMultiDistinct = isMultiDistinct;
     }
 
     /**
@@ -135,26 +135,26 @@ public final class AggregateInfo extends AggregateInfoBase {
      */
     private AggregateInfo(AggregateInfo other) {
         super(other);
-        if (other.mergeAggInfo_ != null) {
-            mergeAggInfo_ = other.mergeAggInfo_.clone();
+        if (other.mergeAggInfo != null) {
+            mergeAggInfo = other.mergeAggInfo.clone();
         }
-        if (other.secondPhaseDistinctAggInfo_ != null) {
-            secondPhaseDistinctAggInfo_ = other.secondPhaseDistinctAggInfo_.clone();
+        if (other.secondPhaseDistinctAggInfo != null) {
+            secondPhaseDistinctAggInfo = other.secondPhaseDistinctAggInfo.clone();
         }
-        aggPhase_ = other.aggPhase_;
-        outputTupleSmap_ = other.outputTupleSmap_.clone();
+        aggPhase = other.aggPhase;
+        outputTupleSmap = other.outputTupleSmap.clone();
         if (other.requiresIntermediateTuple()) {
-            intermediateTupleSmap_ = other.intermediateTupleSmap_.clone();
+            intermediateTupleSmap = other.intermediateTupleSmap.clone();
         } else {
-            Preconditions.checkState(other.intermediateTupleDesc_ == other.outputTupleDesc_);
-            intermediateTupleSmap_ = outputTupleSmap_;
+            Preconditions.checkState(other.intermediateTupleDesc == other.outputTupleDesc);
+            intermediateTupleSmap = outputTupleSmap;
         }
-        partitionExprs_ =
-                (other.partitionExprs_ != null) ? Expr.cloneList(other.partitionExprs_) : null;
+        partitionExprs =
+                (other.partitionExprs != null) ? Expr.cloneList(other.partitionExprs) : null;
     }
 
-    public List<Expr> getPartitionExprs() { return partitionExprs_; }
-    public void setPartitionExprs(List<Expr> exprs) { partitionExprs_ = exprs; }
+    public List<Expr> getPartitionExprs() { return partitionExprs; }
+    public void setPartitionExprs(List<Expr> exprs) { partitionExprs = exprs; }
 
     /**
      * Creates complete AggregateInfo for groupingExprs and aggExprs, including
@@ -201,8 +201,8 @@ public final class AggregateInfo extends AggregateInfoBase {
             } else {
                 // A tupleDesc should only be given for UNION DISTINCT.
                 Preconditions.checkState(aggExprs == null);
-                result.outputTupleDesc_ = tupleDesc;
-                result.intermediateTupleDesc_ = tupleDesc;
+                result.outputTupleDesc = tupleDesc;
+                result.intermediateTupleDesc = tupleDesc;
             }
             result.createMergeAggInfo(analyzer);
         } else {
@@ -212,7 +212,9 @@ public final class AggregateInfo extends AggregateInfoBase {
             Preconditions.checkState(tupleDesc == null);
             result.createDistinctAggInfo(groupingExprs, distinctAggExprs, analyzer);
         }
-        if (LOG.isDebugEnabled())  LOG.debug("agg info:\n{}", result.debugString());
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("agg info:\n{}", result.debugString());
+        }
         return result;
     }
 
@@ -309,16 +311,16 @@ public final class AggregateInfo extends AggregateInfoBase {
             }
         }
 
-        this.isMultiDistinct_= estimateIfContainsMultiDistinct(distinctAggExprs);
+        this.isMultiDistinct = estimateIfContainsMultiDistinct(distinctAggExprs);
         isDistinctAgg = true;
 
         // add DISTINCT parameters to grouping exprs
-        if (!isMultiDistinct_) {
-            groupingExprs_.addAll(expr0Children);
+        if (!isMultiDistinct) {
+            groupingExprs.addAll(expr0Children);
         }
 
         // remove DISTINCT aggregate functions from aggExprs
-        aggregateExprs_.removeAll(distinctAggExprs);
+        aggregateExprs.removeAll(distinctAggExprs);
 
         createTupleDescs(analyzer);
         createSmaps(analyzer);
@@ -328,47 +330,49 @@ public final class AggregateInfo extends AggregateInfoBase {
 
     public ArrayList<FunctionCallExpr> getMaterializedAggregateExprs() {
         ArrayList<FunctionCallExpr> result = Lists.newArrayList();
-        for (Integer i: materializedSlots_) {
-          result.add(aggregateExprs_.get(i));
+        for (Integer i: materializedSlots) {
+          result.add(aggregateExprs.get(i));
         }
         return result;
     }
 
     public AggregateInfo getMergeAggInfo() {
-        return mergeAggInfo_;
+        return mergeAggInfo;
     }
 
-    public boolean isMerge() { return aggPhase_.isMerge(); }
-    public boolean isDistinctAgg() { return secondPhaseDistinctAggInfo_ != null; }
-    public ExprSubstitutionMap getIntermediateSmap() { return intermediateTupleSmap_; }
-    public ExprSubstitutionMap getOutputSmap() { return outputTupleSmap_; }
+    public boolean isMerge() { return aggPhase.isMerge(); }
+    public boolean isDistinctAgg() { return secondPhaseDistinctAggInfo != null; }
+    public ExprSubstitutionMap getIntermediateSmap() { return intermediateTupleSmap; }
+    public ExprSubstitutionMap getOutputSmap() { return outputTupleSmap; }
     public ExprSubstitutionMap getOutputToIntermediateSmap() {
-        return outputToIntermediateTupleSmap_;
+        return outputToIntermediateTupleSmap;
     }
 
     public boolean hasAggregateExprs() {
-        return !aggregateExprs_.isEmpty() ||
-                (secondPhaseDistinctAggInfo_ != null &&
-                        !secondPhaseDistinctAggInfo_.getAggregateExprs().isEmpty());
+        return !aggregateExprs.isEmpty() ||
+                (secondPhaseDistinctAggInfo != null &&
+                        !secondPhaseDistinctAggInfo.getAggregateExprs().isEmpty());
     }
 
     public void setIsMultiDistinct(boolean value) {
-        this.isMultiDistinct_ = value;
+        this.isMultiDistinct = value;
     }
 
     public boolean isMultiDistinct() {
-        return isMultiDistinct_;
+        return isMultiDistinct;
     }
 
     public AggregateInfo getSecondPhaseDistinctAggInfo() {
-        return secondPhaseDistinctAggInfo_;
+        return secondPhaseDistinctAggInfo;
     }
 
     /**
      * Return the tuple id produced in the final aggregation step.
      */
     public TupleId getResultTupleId() {
-        if (isDistinctAgg()) return secondPhaseDistinctAggInfo_.getOutputTupleId();
+        if (isDistinctAgg()) {
+            return secondPhaseDistinctAggInfo.getOutputTupleId();
+        }
         return getOutputTupleId();
     }
 
@@ -377,14 +381,14 @@ public final class AggregateInfo extends AggregateInfoBase {
      * of performing the aggregate computation described by this AggregateInfo.
      */
     public void getRefdSlots(List<SlotId> ids) {
-        Preconditions.checkState(outputTupleDesc_ != null);
-        if (groupingExprs_ != null) {
-            Expr.getIds(groupingExprs_, null, ids);
+        Preconditions.checkState(outputTupleDesc != null);
+        if (groupingExprs != null) {
+            Expr.getIds(groupingExprs, null, ids);
         }
-        Expr.getIds(aggregateExprs_, null, ids);
+        Expr.getIds(aggregateExprs, null, ids);
         // The backend assumes that the entire aggTupleDesc is materialized
-        for (int i = 0; i < outputTupleDesc_.getSlots().size(); ++i) {
-            ids.add(outputTupleDesc_.getSlots().get(i).getId());
+        for (int i = 0; i < outputTupleDesc.getSlots().size(); ++i) {
+            ids.add(outputTupleDesc.getSlots().get(i).getId());
         }
     }
 
@@ -408,24 +412,24 @@ public final class AggregateInfo extends AggregateInfoBase {
      *     aggTupleDesc
      */
     public void substitute(ExprSubstitutionMap smap, Analyzer analyzer) {
-        groupingExprs_ = Expr.substituteList(groupingExprs_, smap, analyzer, true);
+        groupingExprs = Expr.substituteList(groupingExprs, smap, analyzer, true);
         if (LOG.isTraceEnabled()) {
-            LOG.trace("AggInfo: grouping_exprs=" + Expr.debugString(groupingExprs_));
+            LOG.trace("AggInfo: grouping_exprs=" + Expr.debugString(groupingExprs));
         }
 
         // The smap in this case should not substitute the aggs themselves, only
         // their subexpressions.
         List<Expr> substitutedAggs =
-            Expr.substituteList(aggregateExprs_, smap, analyzer, false);
-        aggregateExprs_.clear();
+            Expr.substituteList(aggregateExprs, smap, analyzer, false);
+        aggregateExprs.clear();
         for (Expr substitutedAgg: substitutedAggs) {
-            aggregateExprs_.add((FunctionCallExpr) substitutedAgg);
+            aggregateExprs.add((FunctionCallExpr) substitutedAgg);
         }
 
-        outputTupleSmap_.substituteLhs(smap, analyzer);
-        intermediateTupleSmap_.substituteLhs(smap, analyzer);
-        if (secondPhaseDistinctAggInfo_ != null) {
-            secondPhaseDistinctAggInfo_.substitute(smap, analyzer);
+        outputTupleSmap.substituteLhs(smap, analyzer);
+        intermediateTupleSmap.substituteLhs(smap, analyzer);
+        if (secondPhaseDistinctAggInfo != null) {
+            secondPhaseDistinctAggInfo.substitute(smap, analyzer);
         }
     }
 
@@ -441,8 +445,8 @@ public final class AggregateInfo extends AggregateInfoBase {
      * createAggTupleDesc() must not be called on it.
      */
     private void createMergeAggInfo(Analyzer analyzer)  {
-        Preconditions.checkState(mergeAggInfo_ == null);
-        TupleDescriptor inputDesc = intermediateTupleDesc_;
+        Preconditions.checkState(mergeAggInfo == null);
+        TupleDescriptor inputDesc = intermediateTupleDesc;
         // construct grouping exprs
         ArrayList<Expr> groupingExprs = Lists.newArrayList();
         for (int i = 0; i < getGroupingExprs().size(); ++i) {
@@ -470,13 +474,13 @@ public final class AggregateInfo extends AggregateInfoBase {
         }
 
         AggPhase aggPhase =
-                (aggPhase_ == AggPhase.FIRST) ? AggPhase.FIRST_MERGE : AggPhase.SECOND_MERGE;
-        mergeAggInfo_ = new AggregateInfo(groupingExprs, aggExprs, aggPhase, isMultiDistinct_);
-        mergeAggInfo_.intermediateTupleDesc_ = intermediateTupleDesc_;
-        mergeAggInfo_.outputTupleDesc_ = outputTupleDesc_;
-        mergeAggInfo_.intermediateTupleSmap_ = intermediateTupleSmap_;
-        mergeAggInfo_.outputTupleSmap_ = outputTupleSmap_;
-        mergeAggInfo_.materializedSlots_ = materializedSlots_;
+                (this.aggPhase == AggPhase.FIRST) ? AggPhase.FIRST_MERGE : AggPhase.SECOND_MERGE;
+        mergeAggInfo = new AggregateInfo(groupingExprs, aggExprs, aggPhase, isMultiDistinct);
+        mergeAggInfo.intermediateTupleDesc = intermediateTupleDesc;
+        mergeAggInfo.outputTupleDesc = outputTupleDesc;
+        mergeAggInfo.intermediateTupleSmap = intermediateTupleSmap;
+        mergeAggInfo.outputTupleSmap = outputTupleSmap;
+        mergeAggInfo.materializedSlots = materializedSlots;
     }
 
     /**
@@ -528,11 +532,11 @@ public final class AggregateInfo extends AggregateInfoBase {
     private void createSecondPhaseAggInfo(
             ArrayList<Expr> origGroupingExprs,
             ArrayList<FunctionCallExpr> distinctAggExprs, Analyzer analyzer) throws AnalysisException {
-        Preconditions.checkState(secondPhaseDistinctAggInfo_ == null);
+        Preconditions.checkState(secondPhaseDistinctAggInfo == null);
         Preconditions.checkState(!distinctAggExprs.isEmpty());
 
         // The output of the 1st phase agg is the 1st phase intermediate.
-        TupleDescriptor inputDesc = intermediateTupleDesc_;
+        TupleDescriptor inputDesc = intermediateTupleDesc;
 
         // construct agg exprs for original DISTINCT aggregate functions
         // (these aren't part of this.aggExprs)
@@ -541,7 +545,7 @@ public final class AggregateInfo extends AggregateInfoBase {
         for (FunctionCallExpr inputExpr : distinctAggExprs) {
             Preconditions.checkState(inputExpr.isAggregateFunction());
             FunctionCallExpr aggExpr = null;
-            if (!isMultiDistinct_) {
+            if (!isMultiDistinct) {
                 if (inputExpr.getFnName().getFunction().equalsIgnoreCase(FunctionSet.COUNT)) {
                     // COUNT(DISTINCT ...) ->
                     // COUNT(IF(IsNull(<agg slot 1>), NULL, IF(IsNull(<agg slot 2>), NULL, ...)))
@@ -561,7 +565,9 @@ public final class AggregateInfo extends AggregateInfoBase {
                     // tuple reference is correct.
                     exprList.add(new SlotRef(inputDesc.getSlots().get(origGroupingExprs.size())));
                     // Check if user provided a custom separator
-                    if (inputExpr.getChildren().size() == 2) exprList.add(inputExpr.getChild(1));
+                    if (inputExpr.getChildren().size() == 2) {
+                        exprList.add(inputExpr.getChild(1));
+                    }
                     aggExpr = new FunctionCallExpr(inputExpr.getFnName(), exprList);
                 } else {
                     // SUM(DISTINCT <expr>) -> SUM(<last grouping slot>);
@@ -578,8 +584,8 @@ public final class AggregateInfo extends AggregateInfoBase {
         }
 
         // map all the remaining agg fns
-        for (int i = 0; i < aggregateExprs_.size(); ++i) {
-            FunctionCallExpr inputExpr = aggregateExprs_.get(i);
+        for (int i = 0; i < aggregateExprs.size(); ++i) {
+            FunctionCallExpr inputExpr = aggregateExprs.get(i);
             Preconditions.checkState(inputExpr.isAggregateFunction());
             // we're aggregating an output slot of the 1st agg phase
             Expr aggExprParam =
@@ -589,7 +595,7 @@ public final class AggregateInfo extends AggregateInfoBase {
             secondPhaseAggExprs.add(aggExpr);
         }
         Preconditions.checkState(
-                secondPhaseAggExprs.size() == aggregateExprs_.size() + distinctAggExprs.size());
+                secondPhaseAggExprs.size() == aggregateExprs.size() + distinctAggExprs.size());
 
         for (FunctionCallExpr aggExpr : secondPhaseAggExprs) {
             aggExpr.analyzeNoThrow(analyzer);
@@ -597,12 +603,12 @@ public final class AggregateInfo extends AggregateInfoBase {
         }
 
         ArrayList<Expr> substGroupingExprs =
-                Expr.substituteList(origGroupingExprs, intermediateTupleSmap_, analyzer, false);
-        secondPhaseDistinctAggInfo_ =
-                new AggregateInfo(substGroupingExprs, secondPhaseAggExprs, AggPhase.SECOND, isMultiDistinct_);
-        secondPhaseDistinctAggInfo_.createTupleDescs(analyzer);
-        secondPhaseDistinctAggInfo_.createSecondPhaseAggSMap(this, distinctAggExprs);
-        secondPhaseDistinctAggInfo_.createMergeAggInfo(analyzer);
+                Expr.substituteList(origGroupingExprs, intermediateTupleSmap, analyzer, false);
+        secondPhaseDistinctAggInfo =
+                new AggregateInfo(substGroupingExprs, secondPhaseAggExprs, AggPhase.SECOND, isMultiDistinct);
+        secondPhaseDistinctAggInfo.createTupleDescs(analyzer);
+        secondPhaseDistinctAggInfo.createSecondPhaseAggSMap(this, distinctAggExprs);
+        secondPhaseDistinctAggInfo.createMergeAggInfo(analyzer);
     }
 
     /**
@@ -611,12 +617,12 @@ public final class AggregateInfo extends AggregateInfoBase {
      */
     private void createSecondPhaseAggSMap(
             AggregateInfo inputAggInfo, ArrayList<FunctionCallExpr> distinctAggExprs) {
-        outputTupleSmap_.clear();
+        outputTupleSmap.clear();
         int slotIdx = 0;
-        ArrayList<SlotDescriptor> slotDescs = outputTupleDesc_.getSlots();
+        ArrayList<SlotDescriptor> slotDescs = outputTupleDesc.getSlots();
 
         int numDistinctParams = 0;
-        if (!isMultiDistinct_) {
+        if (!isMultiDistinct) {
             numDistinctParams = distinctAggExprs.get(0).getChildren().size();
             // If we are counting distinct params of group_concat, we cannot include the custom
             // separator since it is not a distinct param.
@@ -638,20 +644,20 @@ public final class AggregateInfo extends AggregateInfoBase {
         // original grouping exprs -> first m slots
         for (int i = 0; i < numOrigGroupingExprs; ++i, ++slotIdx) {
             Expr groupingExpr = inputAggInfo.getGroupingExprs().get(i);
-            outputTupleSmap_.put(
+            outputTupleSmap.put(
                     groupingExpr.clone(), new SlotRef(slotDescs.get(slotIdx)));
         }
 
         // distinct agg exprs -> next n slots
         for (int i = 0; i < distinctAggExprs.size(); ++i, ++slotIdx) {
             Expr aggExpr = distinctAggExprs.get(i);
-            outputTupleSmap_.put(aggExpr.clone(), (new SlotRef(slotDescs.get(slotIdx))));
+            outputTupleSmap.put(aggExpr.clone(), (new SlotRef(slotDescs.get(slotIdx))));
         }
 
         // remaining agg exprs -> remaining slots
         for (int i = 0; i < inputAggInfo.getAggregateExprs().size(); ++i, ++slotIdx) {
             Expr aggExpr = inputAggInfo.getAggregateExprs().get(i);
-            outputTupleSmap_.put(aggExpr.clone(), new SlotRef(slotDescs.get(slotIdx)));
+            outputTupleSmap.put(aggExpr.clone(), new SlotRef(slotDescs.get(slotIdx)));
         }
     }
 
@@ -662,40 +668,44 @@ public final class AggregateInfo extends AggregateInfoBase {
      * predicates between the grouping slots of the two tuples.
      */
     public void createSmaps(Analyzer analyzer) {
-        Preconditions.checkNotNull(outputTupleDesc_);
-        Preconditions.checkNotNull(intermediateTupleDesc_);
+        Preconditions.checkNotNull(outputTupleDesc);
+        Preconditions.checkNotNull(intermediateTupleDesc);
 
         List<Expr> exprs = Lists.newArrayListWithCapacity(
-                groupingExprs_.size() + aggregateExprs_.size());
-        exprs.addAll(groupingExprs_);
-        exprs.addAll(aggregateExprs_);
+                groupingExprs.size() + aggregateExprs.size());
+        exprs.addAll(groupingExprs);
+        exprs.addAll(aggregateExprs);
         for (int i = 0; i < exprs.size(); ++i) {
             Expr expr = exprs.get(i);
             if (expr.isImplicitCast()) {
-                outputTupleSmap_.put(expr.getChild(0).clone(),
-                        new SlotRef(outputTupleDesc_.getSlots().get(i)));
+                outputTupleSmap.put(expr.getChild(0).clone(),
+                        new SlotRef(outputTupleDesc.getSlots().get(i)));
             } else {
-                outputTupleSmap_.put(expr.clone(),
-                        new SlotRef(outputTupleDesc_.getSlots().get(i)));
+                outputTupleSmap.put(expr.clone(),
+                        new SlotRef(outputTupleDesc.getSlots().get(i)));
+            }
+            if (!requiresIntermediateTuple()) {
+                continue;
             }
-            if (!requiresIntermediateTuple()) continue;
-
-            intermediateTupleSmap_.put(expr.clone(),
-                    new SlotRef(intermediateTupleDesc_.getSlots().get(i)));
-            outputToIntermediateTupleSmap_.put(
-                    new SlotRef(outputTupleDesc_.getSlots().get(i)),
-                    new SlotRef(intermediateTupleDesc_.getSlots().get(i)));
-            if (i < groupingExprs_.size()) {
+
+            intermediateTupleSmap.put(expr.clone(),
+                    new SlotRef(intermediateTupleDesc.getSlots().get(i)));
+            outputToIntermediateTupleSmap.put(
+                    new SlotRef(outputTupleDesc.getSlots().get(i)),
+                    new SlotRef(intermediateTupleDesc.getSlots().get(i)));
+            if (i < groupingExprs.size()) {
                 analyzer.createAuxEquivPredicate(
-                        new SlotRef(outputTupleDesc_.getSlots().get(i)),
-                        new SlotRef(intermediateTupleDesc_.getSlots().get(i)));
+                        new SlotRef(outputTupleDesc.getSlots().get(i)),
+                        new SlotRef(intermediateTupleDesc.getSlots().get(i)));
             }
         }
-        if (!requiresIntermediateTuple()) intermediateTupleSmap_ = outputTupleSmap_;
+        if (!requiresIntermediateTuple()) {
+            intermediateTupleSmap = outputTupleSmap;
+        }
 
         if (LOG.isTraceEnabled()) {
-            LOG.trace("output smap=" + outputTupleSmap_.debugString());
-            LOG.trace("intermediate smap=" + intermediateTupleSmap_.debugString());
+            LOG.trace("output smap=" + outputTupleSmap.debugString());
+            LOG.trace("intermediate smap=" + intermediateTupleSmap.debugString());
         }
     }
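
createSmaps() maps each grouping expr and aggregate expr, in order, to the SlotRef of the slot at the same position in the output tuple (and in the intermediate tuple when one is required). A toy sketch of that positional mapping, using strings in place of Expr and SlotRef; names are illustrative, not Doris classes:

    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Illustration only: grouping exprs and aggregate exprs map, by position,
    // to slots of the output tuple; substitution is then a lookup in this map.
    public class SmapSketch {
        public static void main(String[] args) {
            List<String> groupingExprs = List.of("k1", "k2");
            List<String> aggregateExprs = List.of("sum(v)");

            Map<String, String> outputTupleSmap = new LinkedHashMap<>();
            int slot = 0;
            for (String e : groupingExprs) {
                outputTupleSmap.put(e, "out-slot#" + slot++);
            }
            for (String e : aggregateExprs) {
                outputTupleSmap.put(e, "out-slot#" + slot++);
            }

            // "substituting" sum(v) against the output tuple
            System.out.println(outputTupleSmap.get("sum(v)")); // out-slot#2
        }
    }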
 
@@ -712,7 +722,7 @@ public final class AggregateInfo extends AggregateInfoBase {
      * - Currently only the sum function will involve this problem.
      */
     public void updateTypeOfAggregateExprs() {
-        for (FunctionCallExpr functionCallExpr : aggregateExprs_) {
+        for (FunctionCallExpr functionCallExpr : aggregateExprs) {
             if (!functionCallExpr.getFnName().getFunction().equalsIgnoreCase("sum")) {
                 continue;
             }
@@ -744,43 +754,45 @@ public final class AggregateInfo extends AggregateInfoBase {
      */
     @Override
     public void materializeRequiredSlots(Analyzer analyzer, ExprSubstitutionMap smap) {
-        for (int i = 0; i < groupingExprs_.size(); ++i) {
-            outputTupleDesc_.getSlots().get(i).setIsMaterialized(true);
-            intermediateTupleDesc_.getSlots().get(i).setIsMaterialized(true);
+        for (int i = 0; i < groupingExprs.size(); ++i) {
+            outputTupleDesc.getSlots().get(i).setIsMaterialized(true);
+            intermediateTupleDesc.getSlots().get(i).setIsMaterialized(true);
         }
 
         // collect input exprs: grouping exprs plus aggregate exprs that need to be
         // materialized
-        materializedSlots_.clear();
+        materializedSlots.clear();
         List<Expr> exprs = Lists.newArrayList();
-        exprs.addAll(groupingExprs_);
+        exprs.addAll(groupingExprs);
 
-        int aggregateExprsSize = aggregateExprs_.size();
-        int groupExprsSize = groupingExprs_.size();
+        int aggregateExprsSize = aggregateExprs.size();
+        int groupExprsSize = groupingExprs.size();
         boolean isDistinctAgg = isDistinctAgg();
         for (int i = 0; i < aggregateExprsSize; ++i) {
-            FunctionCallExpr functionCallExpr = aggregateExprs_.get(i);
+            FunctionCallExpr functionCallExpr = aggregateExprs.get(i);
             SlotDescriptor slotDesc =
-                    outputTupleDesc_.getSlots().get(groupExprsSize + i);
+                    outputTupleDesc.getSlots().get(groupExprsSize + i);
             SlotDescriptor intermediateSlotDesc =
-                    intermediateTupleDesc_.getSlots().get(groupExprsSize + i);
-            if (isDistinctAgg || isMultiDistinct_) {
+                    intermediateTupleDesc.getSlots().get(groupExprsSize + i);
+            if (isDistinctAgg || isMultiDistinct) {
                 slotDesc.setIsMaterialized(true);
                 intermediateSlotDesc.setIsMaterialized(true);
             }
 
-            if (!slotDesc.isMaterialized()) continue;
+            if (!slotDesc.isMaterialized()) {
+                continue;
+            }
 
             intermediateSlotDesc.setIsMaterialized(true);
             exprs.add(functionCallExpr);
-            materializedSlots_.add(i);
+            materializedSlots.add(i);
         }
 
         List<Expr> resolvedExprs = Expr.substituteList(exprs, smap, analyzer, false);
         analyzer.materializeSlots(resolvedExprs);
 
         if (isDistinctAgg()) {
-            secondPhaseDistinctAggInfo_.materializeRequiredSlots(analyzer, null);
+            secondPhaseDistinctAggInfo.materializeRequiredSlots(analyzer, null);
         }
     }
 
@@ -791,26 +803,26 @@ public final class AggregateInfo extends AggregateInfoBase {
      * because we could derive both hash and order-based partitions
      */
     public DataPartition getPartition() {
-        if (groupingExprs_.isEmpty()) {
+        if (groupingExprs.isEmpty()) {
             return DataPartition.UNPARTITIONED;
         } else {
-            return new DataPartition(TPartitionType.HASH_PARTITIONED, groupingExprs_);
+            return new DataPartition(TPartitionType.HASH_PARTITIONED, groupingExprs);
         }
     }
 
     public String debugString() {
         StringBuilder out = new StringBuilder(super.debugString());
         out.append(MoreObjects.toStringHelper(this)
-                .add("phase", aggPhase_)
-                .add("intermediate_smap", intermediateTupleSmap_.debugString())
-                .add("output_smap", outputTupleSmap_.debugString())
+                .add("phase", aggPhase)
+                .add("intermediate_smap", intermediateTupleSmap.debugString())
+                .add("output_smap", outputTupleSmap.debugString())
                 .toString());
-        if (mergeAggInfo_ != this && mergeAggInfo_ != null) {
-            out.append("\nmergeAggInfo:\n" + mergeAggInfo_.debugString());
+        if (mergeAggInfo != this && mergeAggInfo != null) {
+            out.append("\nmergeAggInfo:\n" + mergeAggInfo.debugString());
         }
-        if (secondPhaseDistinctAggInfo_ != null) {
+        if (secondPhaseDistinctAggInfo != null) {
             out.append("\nsecondPhaseDistinctAggInfo:\n"
-                    + secondPhaseDistinctAggInfo_.debugString());
+                    + secondPhaseDistinctAggInfo.debugString());
         }
         return out.toString();
     }
@@ -826,6 +838,6 @@ public final class AggregateInfo extends AggregateInfoBase {
     }
 
     public List<Expr> getInputPartitionExprs() {
-        return partitionExprs_ != null ? partitionExprs_ : groupingExprs_;
+        return partitionExprs != null ? partitionExprs : groupingExprs;
     }
 }
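
As a rough illustration of the two-phase DISTINCT rewrite that createSecondPhaseAggInfo() performs (COUNT(DISTINCT c) becomes a first phase that groups on the DISTINCT argument and a second phase that counts the resulting rows), here is a minimal, self-contained Java sketch; the class and data are made up and stand in for real exprs and tuples:

    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;
    import java.util.stream.Collectors;

    // Illustration only: COUNT(DISTINCT c) GROUP BY k evaluated in two phases.
    public class TwoPhaseDistinctSketch {
        public static void main(String[] args) {
            // rows of (k, c)
            List<List<String>> rows = List.of(
                    List.of("a", "x"), List.of("a", "x"),
                    List.of("a", "y"), List.of("b", "z"));

            // Phase 1: the DISTINCT argument is added to the grouping exprs,
            // so we effectively GROUP BY (k, c).
            Set<List<String>> phase1 = new HashSet<>(rows);

            // Phase 2: GROUP BY k over the phase-1 output and COUNT the rows.
            Map<String, Long> result = phase1.stream()
                    .collect(Collectors.groupingBy(r -> r.get(0), Collectors.counting()));

            System.out.println(result); // a -> 2, b -> 1
        }
    }

The same shape applies to the other rewrites mentioned in the hunks above, for example SUM(DISTINCT <expr>), which the second phase turns into a SUM over the last grouping slot.
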
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfoBase.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfoBase.java
index 9961931b29..7fa14b758d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfoBase.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfoBase.java
@@ -43,12 +43,12 @@ public abstract class AggregateInfoBase {
 
     // For aggregations: All unique grouping expressions from a select block.
     // For analytics: Empty.
-    protected ArrayList<Expr> groupingExprs_;
+    protected ArrayList<Expr> groupingExprs;
 
     // For aggregations: All unique aggregate expressions from a select block.
     // For analytics: The results of AnalyticExpr.getFnCall() for the unique
     // AnalyticExprs of a select block.
-    protected ArrayList<FunctionCallExpr> aggregateExprs_;
+    protected ArrayList<FunctionCallExpr> aggregateExprs;
 
     // The tuple into which the intermediate output of an aggregation is materialized.
     // Contains groupingExprs.size() + aggregateExprs.size() slots, the first of which
@@ -57,28 +57,28 @@ public abstract class AggregateInfoBase {
     // of the aggregate functions' intermediate types.
     // Identical to outputTupleDesc_ if no aggregateExpr has an output type that is
     // different from its intermediate type.
-    protected TupleDescriptor intermediateTupleDesc_;
+    protected TupleDescriptor intermediateTupleDesc;
 
     // The tuple into which the final output of the aggregation is materialized.
     // Contains groupingExprs.size() + aggregateExprs.size() slots, the first of which
     // contain the values of the grouping exprs, followed by slots into which the
     // aggregateExprs' finalize() symbol write its result, i.e., slots of the aggregate
     // functions' output types.
-    protected TupleDescriptor outputTupleDesc_;
+    protected TupleDescriptor outputTupleDesc;
 
     // For aggregation: indices into aggregate exprs that need to be materialized
     // For analytics: indices into the analytic exprs and their corresponding aggregate
     // exprs that need to be materialized.
     // Populated in materializeRequiredSlots() which must be implemented by subclasses.
-    protected ArrayList<Integer> materializedSlots_ = Lists.newArrayList();
+    protected ArrayList<Integer> materializedSlots = Lists.newArrayList();
 
     protected AggregateInfoBase(ArrayList<Expr> groupingExprs,
                                 ArrayList<FunctionCallExpr> aggExprs)  {
         Preconditions.checkState(groupingExprs != null || aggExprs != null);
-        groupingExprs_ =
+        this.groupingExprs =
                 groupingExprs != null ? Expr.cloneList(groupingExprs) : new ArrayList<Expr>();
         Preconditions.checkState(aggExprs != null || !(this instanceof AnalyticInfo));
-        aggregateExprs_ =
+        aggregateExprs =
                 aggExprs != null ? Expr.cloneList(aggExprs) : new ArrayList<FunctionCallExpr>();
     }
 
@@ -86,13 +86,13 @@ public abstract class AggregateInfoBase {
      * C'tor for cloning.
      */
     protected AggregateInfoBase(AggregateInfoBase other) {
-        groupingExprs_ =
-                (other.groupingExprs_ != null) ? Expr.cloneList(other.groupingExprs_) : null;
-        aggregateExprs_ =
-                (other.aggregateExprs_ != null) ? Expr.cloneList(other.aggregateExprs_) : null;
-        intermediateTupleDesc_ = other.intermediateTupleDesc_;
-        outputTupleDesc_ = other.outputTupleDesc_;
-        materializedSlots_ = Lists.newArrayList(other.materializedSlots_);
+        groupingExprs =
+                (other.groupingExprs != null) ? Expr.cloneList(other.groupingExprs) : null;
+        aggregateExprs =
+                (other.aggregateExprs != null) ? Expr.cloneList(other.aggregateExprs) : null;
+        intermediateTupleDesc = other.intermediateTupleDesc;
+        outputTupleDesc = other.outputTupleDesc;
+        materializedSlots = Lists.newArrayList(other.materializedSlots);
     }
 
     /**
@@ -103,11 +103,11 @@ public abstract class AggregateInfoBase {
     protected void createTupleDescs(Analyzer analyzer) {
         // Create the intermediate tuple desc first, so that the tuple ids are increasing
         // from bottom to top in the plan tree.
-        intermediateTupleDesc_ = createTupleDesc(analyzer, false);
-        if (requiresIntermediateTuple(aggregateExprs_, groupingExprs_.size() == 0)) {
-            outputTupleDesc_ = createTupleDesc(analyzer, true);
+        intermediateTupleDesc = createTupleDesc(analyzer, false);
+        if (requiresIntermediateTuple(aggregateExprs, groupingExprs.size() == 0)) {
+            outputTupleDesc = createTupleDesc(analyzer, true);
         } else {
-            outputTupleDesc_ = intermediateTupleDesc_;
+            outputTupleDesc = intermediateTupleDesc;
         }
     }
 
@@ -122,15 +122,15 @@ public abstract class AggregateInfoBase {
                 analyzer.getDescTbl().createTupleDescriptor(
                         tupleDebugName() + (isOutputTuple ? "-out" : "-intermed"));
         List<Expr> exprs = Lists.newArrayListWithCapacity(
-                groupingExprs_.size() + aggregateExprs_.size());
-        exprs.addAll(groupingExprs_);
-        exprs.addAll(aggregateExprs_);
+                groupingExprs.size() + aggregateExprs.size());
+        exprs.addAll(groupingExprs);
+        exprs.addAll(aggregateExprs);
 
-        int aggregateExprStartIndex = groupingExprs_.size();
+        int aggregateExprStartIndex = groupingExprs.size();
         // if the aggregation is a grouping set, every groupingExpr except the last one
         // must be set to be nullable
-        boolean isGroupingSet = !groupingExprs_.isEmpty() &&
-                groupingExprs_.get(groupingExprs_.size() - 1) instanceof VirtualSlotRef;
+        boolean isGroupingSet = !groupingExprs.isEmpty() &&
+                groupingExprs.get(groupingExprs.size() - 1) instanceof VirtualSlotRef;
 
         for (int i = 0; i < exprs.size(); ++i) {
             Expr expr = exprs.get(i);
@@ -160,7 +160,7 @@ public abstract class AggregateInfoBase {
                 }
 
                 if (isOutputTuple && aggExpr.getFn().getNullableMode().equals(Function.NullableMode.DEPEND_ON_ARGUMENT) &&
-                        groupingExprs_.size() == 0) {
+                        groupingExprs.size() == 0) {
                     slotDesc.setIsNullable(true);
                 }
 
@@ -193,16 +193,16 @@ public abstract class AggregateInfoBase {
     public abstract void materializeRequiredSlots(Analyzer analyzer,
                                                   ExprSubstitutionMap smap);
 
-    public ArrayList<Expr> getGroupingExprs() { return groupingExprs_; }
-    public ArrayList<FunctionCallExpr> getAggregateExprs() { return aggregateExprs_; }
-    public TupleDescriptor getOutputTupleDesc() { return outputTupleDesc_; }
-    public TupleDescriptor getIntermediateTupleDesc() { return intermediateTupleDesc_; }
-    public TupleId getIntermediateTupleId() { return intermediateTupleDesc_.getId(); }
-    public TupleId getOutputTupleId() { return outputTupleDesc_.getId(); }
+    public ArrayList<Expr> getGroupingExprs() { return groupingExprs; }
+    public ArrayList<FunctionCallExpr> getAggregateExprs() { return aggregateExprs; }
+    public TupleDescriptor getOutputTupleDesc() { return outputTupleDesc; }
+    public TupleDescriptor getIntermediateTupleDesc() { return intermediateTupleDesc; }
+    public TupleId getIntermediateTupleId() { return intermediateTupleDesc.getId(); }
+    public TupleId getOutputTupleId() { return outputTupleDesc.getId(); }
     public boolean requiresIntermediateTuple() {
-        Preconditions.checkNotNull(intermediateTupleDesc_);
-        Preconditions.checkNotNull(outputTupleDesc_);
-        return intermediateTupleDesc_ != outputTupleDesc_;
+        Preconditions.checkNotNull(intermediateTupleDesc);
+        Preconditions.checkNotNull(outputTupleDesc);
+        return intermediateTupleDesc != outputTupleDesc;
     }
 
     /**
@@ -213,7 +213,9 @@ public abstract class AggregateInfoBase {
     public static <T extends Expr> boolean requiresIntermediateTuple(List<T> aggExprs) {
         for (Expr aggExpr: aggExprs) {
             Type intermediateType = ((AggregateFunction) aggExpr.fn).getIntermediateType();
-            if (intermediateType != null) return true;
+            if (intermediateType != null) {
+                return true;
+            }
         }
         return false;
     }
@@ -225,7 +227,9 @@ public abstract class AggregateInfoBase {
     public static <T extends Expr> boolean requiresIntermediateTuple(List<T> aggExprs, boolean noGrouping) {
         for (Expr aggExpr: aggExprs) {
             Type intermediateType = ((AggregateFunction) aggExpr.fn).getIntermediateType();
-            if (intermediateType != null) return true;
+            if (intermediateType != null) {
+                return true;
+            }
             if (noGrouping && ((AggregateFunction) aggExpr.fn).getNullableMode().equals(Function.NullableMode.DEPEND_ON_ARGUMENT)) {
                 return true;
             }
@@ -236,12 +240,12 @@ public abstract class AggregateInfoBase {
     public String debugString() {
         StringBuilder out = new StringBuilder();
         out.append(MoreObjects.toStringHelper(this)
-                .add("grouping_exprs", Expr.debugString(groupingExprs_))
-                .add("aggregate_exprs", Expr.debugString(aggregateExprs_))
-                .add("intermediate_tuple", (intermediateTupleDesc_ == null)
-                        ? "null" : intermediateTupleDesc_.debugString())
-                .add("output_tuple", (outputTupleDesc_ == null)
-                        ? "null" : outputTupleDesc_.debugString())
+                .add("grouping_exprs", Expr.debugString(groupingExprs))
+                .add("aggregate_exprs", Expr.debugString(aggregateExprs))
+                .add("intermediate_tuple", (intermediateTupleDesc == null)
+                        ? "null" : intermediateTupleDesc.debugString())
+                .add("output_tuple", (outputTupleDesc == null)
+                        ? "null" : outputTupleDesc.debugString())
                 .toString());
         return out.toString();
     }
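
For context on requiresIntermediateTuple(): an aggregate needs a separate intermediate tuple when its intermediate type differs from its output type. A minimal sketch of that situation, assuming an average-like aggregate that keeps a (sum, count) pair as state but outputs a single double; the class is illustrative, not a Doris API:

    // Illustration only: an aggregate whose intermediate state differs from its
    // output type, which is the case requiresIntermediateTuple() detects.
    public class AvgStateSketch {
        // intermediate "tuple": a (sum, count) pair
        private double sum;
        private long count;

        void update(double v) {
            sum += v;
            count++;
        }

        // what the merge aggregation phase does with two partial states
        void merge(AvgStateSketch other) {
            sum += other.sum;
            count += other.count;
        }

        // finalize to the output type (a single double)
        double result() {
            return count == 0 ? 0.0 : sum / count;
        }

        public static void main(String[] args) {
            AvgStateSketch left = new AvgStateSketch();
            AvgStateSketch right = new AvgStateSketch();
            left.update(1.0);
            left.update(2.0);
            right.update(6.0);
            left.merge(right);
            System.out.println(left.result()); // 3.0
        }
    }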
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticExpr.java
index 5279ba2b1f..bb3cf7730f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticExpr.java
@@ -41,6 +41,7 @@ import org.slf4j.LoggerFactory;
 import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;
 
 /**
  * Representation of an analytic function call with OVER clause.
@@ -142,6 +143,11 @@ public class AnalyticExpr extends Expr {
         return window;
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), fnCall, orderByElements, window);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (!super.equals(obj)) {
@@ -868,8 +874,9 @@ public class AnalyticExpr extends Expr {
     }
 
     private String exprListToSql(List<? extends Expr> exprs) {
-        if (exprs == null || exprs.isEmpty())
+        if (exprs == null || exprs.isEmpty()) {
             return "";
+        }
         List<String> strings = Lists.newArrayList();
         for (Expr expr : exprs) {
             strings.add(expr.toSql());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticInfo.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticInfo.java
index 4e84643917..0b3cf4f1bc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticInfo.java
@@ -41,23 +41,23 @@ public final class AnalyticInfo extends AggregateInfoBase {
     // All unique analytic exprs of a select block. Used to populate
     // super.aggregateExprs_ based on AnalyticExpr.getFnCall() for each analytic expr
     // in this list.
-    private final ArrayList<Expr> analyticExprs_;
+    private final ArrayList<Expr> analyticExprs;
 
     // Intersection of the partition exps of all the analytic functions.
-    private final List<Expr> commonPartitionExprs_;
+    private final List<Expr> commonPartitionExprs;
 
     // map from analyticExprs_ to their corresponding analytic tuple slotrefs
-    private final ExprSubstitutionMap analyticTupleSmap_;
+    private final ExprSubstitutionMap analyticTupleSmap;
 
     private AnalyticInfo(ArrayList<Expr> analyticExprs) {
         super(new ArrayList<Expr>(), new ArrayList<FunctionCallExpr>());
-        analyticExprs_ = Expr.cloneList(analyticExprs);
+        this.analyticExprs = Expr.cloneList(analyticExprs);
         // Extract the analytic function calls for each analytic expr.
         for (Expr analyticExpr: analyticExprs) {
-            aggregateExprs_.add(((AnalyticExpr) analyticExpr).getFnCall());
+            aggregateExprs.add(((AnalyticExpr) analyticExpr).getFnCall());
         }
-        analyticTupleSmap_ = new ExprSubstitutionMap();
-        commonPartitionExprs_ = computeCommonPartitionExprs();
+        analyticTupleSmap = new ExprSubstitutionMap();
+        commonPartitionExprs = computeCommonPartitionExprs();
     }
 
     /**
@@ -65,15 +65,15 @@ public final class AnalyticInfo extends AggregateInfoBase {
      */
     private AnalyticInfo(AnalyticInfo other) {
         super(other);
-        analyticExprs_ =
-                (other.analyticExprs_ != null) ? Expr.cloneList(other.analyticExprs_) : null;
-        analyticTupleSmap_ = other.analyticTupleSmap_.clone();
-        commonPartitionExprs_ = Expr.cloneList(other.commonPartitionExprs_);
+        analyticExprs =
+                (other.analyticExprs != null) ? Expr.cloneList(other.analyticExprs) : null;
+        analyticTupleSmap = other.analyticTupleSmap.clone();
+        commonPartitionExprs = Expr.cloneList(other.commonPartitionExprs);
     }
 
-    public ArrayList<Expr> getAnalyticExprs() { return analyticExprs_; }
-    public ExprSubstitutionMap getSmap() { return analyticTupleSmap_; }
-    public List<Expr> getCommonPartitionExprs() { return commonPartitionExprs_; }
+    public ArrayList<Expr> getAnalyticExprs() { return analyticExprs; }
+    public ExprSubstitutionMap getSmap() { return analyticTupleSmap; }
+    public List<Expr> getCommonPartitionExprs() { return commonPartitionExprs; }
 
     /**
      * Creates complete AnalyticInfo for analyticExprs, including tuple descriptors and
@@ -88,20 +88,20 @@ public final class AnalyticInfo extends AggregateInfoBase {
 
         // The tuple descriptors are logical. Their slots are remapped to physical tuples
         // during plan generation.
-        result.outputTupleDesc_.setIsMaterialized(false);
-        result.intermediateTupleDesc_.setIsMaterialized(false);
+        result.outputTupleDesc.setIsMaterialized(false);
+        result.intermediateTupleDesc.setIsMaterialized(false);
 
         // Populate analyticTupleSmap_
-        Preconditions.checkState(analyticExprs.size() == result.outputTupleDesc_.getSlots().size());
+        Preconditions.checkState(analyticExprs.size() == result.outputTupleDesc.getSlots().size());
         for (int i = 0; i < analyticExprs.size(); ++i) {
-            result.analyticTupleSmap_.put(result.analyticExprs_.get(i),
-                    new SlotRef(result.outputTupleDesc_.getSlots().get(i)));
-            result.outputTupleDesc_.getSlots().get(i).setSourceExpr(result.analyticExprs_.get(i));
+            result.analyticTupleSmap.put(result.analyticExprs.get(i),
+                    new SlotRef(result.outputTupleDesc.getSlots().get(i)));
+            result.outputTupleDesc.getSlots().get(i).setSourceExpr(result.analyticExprs.get(i));
         }
 
         if (LOG.isDebugEnabled()) {
-            LOG.debug("analytictuple=" + result.outputTupleDesc_.debugString());
-            LOG.debug("analytictuplesmap=" + result.analyticTupleSmap_.debugString());
+            LOG.debug("analytictuple=" + result.outputTupleDesc.debugString());
+            LOG.debug("analytictuplesmap=" + result.analyticTupleSmap.debugString());
             LOG.debug("analytic info:\n" + result.debugString());
         }
         return result;
@@ -113,15 +113,19 @@ public final class AnalyticInfo extends AggregateInfoBase {
      */
     private List<Expr> computeCommonPartitionExprs() {
         List<Expr> result = Lists.newArrayList();
-        for (Expr analyticExpr: analyticExprs_) {
+        for (Expr analyticExpr: analyticExprs) {
             Preconditions.checkState(analyticExpr.isAnalyzed());
             List<Expr> partitionExprs = ((AnalyticExpr) analyticExpr).getPartitionExprs();
-            if (partitionExprs == null) continue;
+            if (partitionExprs == null) {
+                continue;
+            }
             if (result.isEmpty()) {
                 result.addAll(partitionExprs);
             } else {
                 result.retainAll(partitionExprs);
-                if (result.isEmpty()) break;
+                if (result.isEmpty()) {
+                    break;
+                }
             }
         }
         return result;
@@ -129,14 +133,16 @@ public final class AnalyticInfo extends AggregateInfoBase {
 
     @Override
     public void materializeRequiredSlots(Analyzer analyzer, ExprSubstitutionMap smap) {
-        materializedSlots_.clear();
+        materializedSlots.clear();
         List<Expr> exprs = Lists.newArrayList();
-        for (int i = 0; i < analyticExprs_.size(); ++i) {
-            SlotDescriptor outputSlotDesc = outputTupleDesc_.getSlots().get(i);
-            if (!outputSlotDesc.isMaterialized()) continue;
-            intermediateTupleDesc_.getSlots().get(i).setIsMaterialized(true);
-            exprs.add(analyticExprs_.get(i));
-            materializedSlots_.add(i);
+        for (int i = 0; i < analyticExprs.size(); ++i) {
+            SlotDescriptor outputSlotDesc = outputTupleDesc.getSlots().get(i);
+            if (!outputSlotDesc.isMaterialized()) {
+                continue;
+            }
+            intermediateTupleDesc.getSlots().get(i).setIsMaterialized(true);
+            exprs.add(analyticExprs.get(i));
+            materializedSlots.add(i);
         }
         List<Expr> resolvedExprs = Expr.substituteList(exprs, smap, analyzer, false);
         analyzer.materializeSlots(resolvedExprs);
@@ -149,20 +155,22 @@ public final class AnalyticInfo extends AggregateInfoBase {
      * analytic tuple.
      */
     public void checkConsistency() {
-        ArrayList<SlotDescriptor> slots = intermediateTupleDesc_.getSlots();
+        ArrayList<SlotDescriptor> slots = intermediateTupleDesc.getSlots();
 
         // Check materialized slots.
         int numMaterializedSlots = 0;
         for (SlotDescriptor slotDesc: slots) {
-            if (slotDesc.isMaterialized()) ++numMaterializedSlots;
+            if (slotDesc.isMaterialized()) {
+                ++numMaterializedSlots;
+            }
         }
         Preconditions.checkState(numMaterializedSlots ==
-                materializedSlots_.size());
+                materializedSlots.size());
 
         // Check that analytic expr return types match the slot descriptors.
         int slotIdx = 0;
-        for (int i = 0; i < analyticExprs_.size(); ++i) {
-            Expr analyticExpr = analyticExprs_.get(i);
+        for (int i = 0; i < analyticExprs.size(); ++i) {
+            Expr analyticExpr = analyticExprs.get(i);
             Type slotType = slots.get(slotIdx).getType();
             Preconditions.checkState(analyticExpr.getType().equals(slotType),
                     String.format("Analytic expr %s returns type %s but its analytic tuple " +
@@ -176,8 +184,8 @@ public final class AnalyticInfo extends AggregateInfoBase {
     public String debugString() {
         StringBuilder out = new StringBuilder(super.debugString());
         out.append(MoreObjects.toStringHelper(this)
-                .add("analytic_exprs", Expr.debugString(analyticExprs_))
-                .add("smap", analyticTupleSmap_.debugString())
+                .add("analytic_exprs", Expr.debugString(analyticExprs))
+                .add("smap", analyticTupleSmap.debugString())
                 .toString());
         return out.toString();
     }
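
computeCommonPartitionExprs() intersects the partition exprs of all analytic functions by starting from the first non-null list and calling retainAll() against each subsequent one, stopping early once the intersection is empty. A small stand-alone sketch of the same algorithm, using strings in place of exprs:

    import java.util.ArrayList;
    import java.util.List;

    // Illustration only: intersection of several partition-expr lists.
    public class CommonPartitionSketch {
        static List<String> commonPartitions(List<List<String>> partitionLists) {
            List<String> result = new ArrayList<>();
            for (List<String> partitionExprs : partitionLists) {
                if (partitionExprs == null) {
                    continue;
                }
                if (result.isEmpty()) {
                    result.addAll(partitionExprs);
                } else {
                    result.retainAll(partitionExprs);
                    if (result.isEmpty()) {
                        break;
                    }
                }
            }
            return result;
        }

        public static void main(String[] args) {
            System.out.println(commonPartitions(List.of(
                    List.of("a", "b"), List.of("b", "c"), List.of("b")))); // [b]
        }
    }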
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticWindow.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticWindow.java
index b148a45960..53265089b4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticWindow.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticWindow.java
@@ -29,6 +29,7 @@ import org.apache.doris.thrift.TAnalyticWindowType;
 import com.google.common.base.Preconditions;
 
 import java.math.BigDecimal;
+import java.util.Objects;
 
 /**
  * Windowing clause of an analytic expr
@@ -44,15 +45,15 @@ public class AnalyticWindow {
         ROWS("ROWS"),
         RANGE("RANGE");
 
-        private final String description_;
+        private final String description;
 
         private Type(String d) {
-            description_ = d;
+            description = d;
         }
 
         @Override
         public String toString() {
-            return description_;
+            return description;
         }
         public TAnalyticWindowType toThrift() {
             return this == ROWS ? TAnalyticWindowType.ROWS : TAnalyticWindowType.RANGE;
@@ -190,6 +191,11 @@ public class AnalyticWindow {
             return result;
         }
 
+        @Override
+        public int hashCode() {
+            return Objects.hash(type, expr);
+        }
+
         @Override
         public boolean equals(Object obj) {
             if (obj == null) {
@@ -229,80 +235,80 @@ public class AnalyticWindow {
         }
     }
 
-    private final Type type_;
-    private final Boundary leftBoundary_;
-    private Boundary rightBoundary_;  // may be null before analyze()
-    private String toSqlString_;  // cached after analysis
+    private final Type type;
+    private final Boundary leftBoundary;
+    private Boundary rightBoundary;  // may be null before analyze()
+    private String toSqlString;  // cached after analysis
 
     public Type getType() {
-        return type_;
+        return type;
     }
     public Boundary getLeftBoundary() {
-        return leftBoundary_;
+        return leftBoundary;
     }
     public Boundary getRightBoundary() {
-        return rightBoundary_;
+        return rightBoundary;
     }
     public Boundary setRightBoundary(Boundary b) {
-        return rightBoundary_ = b;
+        return rightBoundary = b;
     }
 
     public AnalyticWindow(Type type, Boundary b) {
-        type_ = type;
+        this.type = type;
         Preconditions.checkNotNull(b);
-        leftBoundary_ = b;
-        rightBoundary_ = null;
+        leftBoundary = b;
+        rightBoundary = null;
     }
 
     public AnalyticWindow(Type type, Boundary l, Boundary r) {
-        type_ = type;
+        this.type = type;
         Preconditions.checkNotNull(l);
-        leftBoundary_ = l;
+        leftBoundary = l;
         Preconditions.checkNotNull(r);
-        rightBoundary_ = r;
+        rightBoundary = r;
     }
 
     /**
      * Clone c'tor
      */
     private AnalyticWindow(AnalyticWindow other) {
-        type_ = other.type_;
-        Preconditions.checkNotNull(other.leftBoundary_);
-        leftBoundary_ = other.leftBoundary_.clone();
+        type = other.type;
+        Preconditions.checkNotNull(other.leftBoundary);
+        leftBoundary = other.leftBoundary.clone();
 
-        if (other.rightBoundary_ != null) {
-            rightBoundary_ = other.rightBoundary_.clone();
+        if (other.rightBoundary != null) {
+            rightBoundary = other.rightBoundary.clone();
         }
 
-        toSqlString_ = other.toSqlString_;  // safe to share
+        toSqlString = other.toSqlString;  // safe to share
     }
 
     public AnalyticWindow reverse() {
-        Boundary newRightBoundary = leftBoundary_.converse();
+        Boundary newRightBoundary = leftBoundary.converse();
         Boundary newLeftBoundary = null;
 
-        if (rightBoundary_ == null) {
-            newLeftBoundary = new Boundary(leftBoundary_.getType(), null);
+        if (rightBoundary == null) {
+            newLeftBoundary = new Boundary(leftBoundary.getType(), null);
         } else {
-            newLeftBoundary = rightBoundary_.converse();
+            newLeftBoundary = rightBoundary.converse();
         }
 
-        return new AnalyticWindow(type_, newLeftBoundary, newRightBoundary);
+        return new AnalyticWindow(type, newLeftBoundary, newRightBoundary);
     }
 
     public String toSql() {
-        if (toSqlString_ != null) {
-            return toSqlString_;
+        if (toSqlString != null) {
+            return toSqlString;
         }
 
         StringBuilder sb = new StringBuilder();
-        sb.append(type_.toString()).append(" ");
+        sb.append(type.toString()).append(" ");
 
-        if (rightBoundary_ == null) {
-            sb.append(leftBoundary_.toSql());
+        if (rightBoundary == null) {
+            sb.append(leftBoundary.toSql());
         } else {
-            sb.append("BETWEEN ").append(leftBoundary_.toSql()).append(" AND ");
-            sb.append(rightBoundary_.toSql());
+            sb.append("BETWEEN ").append(leftBoundary.toSql()).append(" AND ");
+            sb.append(rightBoundary.toSql());
         }
 
         return sb.toString();
@@ -310,13 +316,13 @@ public class AnalyticWindow {
 
     public String toDigest() {
         StringBuilder sb = new StringBuilder();
-        sb.append(type_.toString()).append(" ");
+        sb.append(type.toString()).append(" ");
 
-        if (rightBoundary_ == null) {
-            sb.append(leftBoundary_.toDigest());
+        if (rightBoundary == null) {
+            sb.append(leftBoundary.toDigest());
         } else {
-            sb.append("BETWEEN ").append(leftBoundary_.toDigest()).append(" AND ");
-            sb.append(rightBoundary_.toDigest());
+            sb.append("BETWEEN ").append(leftBoundary.toDigest()).append(" AND ");
+            sb.append(rightBoundary.toDigest());
         }
 
         return sb.toString();
@@ -324,21 +330,26 @@ public class AnalyticWindow {
 
 
     public TAnalyticWindow toThrift() {
-        TAnalyticWindow result = new TAnalyticWindow(type_.toThrift());
+        TAnalyticWindow result = new TAnalyticWindow(type.toThrift());
 
-        if (leftBoundary_.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
-            result.setWindowStart(leftBoundary_.toThrift(type_));
+        if (leftBoundary.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
+            result.setWindowStart(leftBoundary.toThrift(type));
         }
 
-        Preconditions.checkNotNull(rightBoundary_);
+        Preconditions.checkNotNull(rightBoundary);
 
-        if (rightBoundary_.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
-            result.setWindowEnd(rightBoundary_.toThrift(type_));
+        if (rightBoundary.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
+            result.setWindowEnd(rightBoundary.toThrift(type));
         }
 
         return result;
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(type, leftBoundary, rightBoundary);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (obj == null) {
@@ -351,14 +362,14 @@ public class AnalyticWindow {
 
         AnalyticWindow o = (AnalyticWindow)obj;
         boolean rightBoundaryEqual =
-            (rightBoundary_ == null) == (o.rightBoundary_ == null);
+            (rightBoundary == null) == (o.rightBoundary == null);
 
-        if (rightBoundaryEqual && rightBoundary_ != null) {
-            rightBoundaryEqual = rightBoundary_.equals(o.rightBoundary_);
+        if (rightBoundaryEqual && rightBoundary != null) {
+            rightBoundaryEqual = rightBoundary.equals(o.rightBoundary);
         }
 
-        return type_ == o.type_
-               && leftBoundary_.equals(o.leftBoundary_)
+        return type == o.type
+               && leftBoundary.equals(o.leftBoundary)
                && rightBoundaryEqual;
     }
 
@@ -393,7 +404,7 @@ public class AnalyticWindow {
             }
         }
 
-        if (type_ == Type.ROWS) {
+        if (type == Type.ROWS) {
             if (!e.isConstant() || !e.getType().isFixedPointType() || !isPos) {
                 throw new AnalysisException(
                         "For ROWS window, the value of a PRECEDING/FOLLOWING offset must be a "
@@ -445,84 +456,84 @@ public class AnalyticWindow {
     }
 
     public void analyze(Analyzer analyzer) throws AnalysisException {
-        leftBoundary_.analyze(analyzer);
+        leftBoundary.analyze(analyzer);
 
-        if (rightBoundary_ != null) {
-            rightBoundary_.analyze(analyzer);
+        if (rightBoundary != null) {
+            rightBoundary.analyze(analyzer);
         }
 
-        if (leftBoundary_.getType() == BoundaryType.UNBOUNDED_FOLLOWING) {
+        if (leftBoundary.getType() == BoundaryType.UNBOUNDED_FOLLOWING) {
             throw new AnalysisException(
-                    leftBoundary_.getType().toString() + " is only allowed for upper bound of "
+                    leftBoundary.getType().toString() + " is only allowed for upper bound of "
                     + "BETWEEN");
         }
 
-        if (rightBoundary_ != null
-                && rightBoundary_.getType() == BoundaryType.UNBOUNDED_PRECEDING) {
+        if (rightBoundary != null
+                && rightBoundary.getType() == BoundaryType.UNBOUNDED_PRECEDING) {
             throw new AnalysisException(
-                    rightBoundary_.getType().toString() + " is only allowed for lower bound of "
+                    rightBoundary.getType().toString() + " is only allowed for lower bound of "
                     + "BETWEEN");
         }
 
         // TODO: Remove when RANGE windows with offset boundaries are supported.
-        if (type_ == Type.RANGE) {
-            if (leftBoundary_.type.isOffset()
-                    || (rightBoundary_ != null && rightBoundary_.type.isOffset())
-                    || (leftBoundary_.type == BoundaryType.CURRENT_ROW
-                            && (rightBoundary_ == null
-                                    || rightBoundary_.type == BoundaryType.CURRENT_ROW))) {
+        if (type == Type.RANGE) {
+            if (leftBoundary.type.isOffset()
+                    || (rightBoundary != null && rightBoundary.type.isOffset())
+                    || (leftBoundary.type == BoundaryType.CURRENT_ROW
+                            && (rightBoundary == null
+                                    || rightBoundary.type == BoundaryType.CURRENT_ROW))) {
                 throw new AnalysisException(
                         "RANGE is only supported with both the lower and upper bounds UNBOUNDED or"
                         + " one UNBOUNDED and the other CURRENT ROW.");
             }
         }
 
-        if (rightBoundary_ == null && leftBoundary_.getType() == BoundaryType.FOLLOWING) {
+        if (rightBoundary == null && leftBoundary.getType() == BoundaryType.FOLLOWING) {
             throw new AnalysisException(
-                    leftBoundary_.getType().toString() + " requires a BETWEEN clause");
+                    leftBoundary.getType().toString() + " requires a BETWEEN clause");
         }
 
-        if (leftBoundary_.getType().isOffset()) {
-            checkOffsetExpr(analyzer, leftBoundary_);
+        if (leftBoundary.getType().isOffset()) {
+            checkOffsetExpr(analyzer, leftBoundary);
         }
 
-        if (rightBoundary_ == null) {
+        if (rightBoundary == null) {
             // set right boundary to implied value, but make sure to cache toSql string
             // beforehand
-            toSqlString_ = toSql();
-            rightBoundary_ = new Boundary(BoundaryType.CURRENT_ROW, null);
+            toSqlString = toSql();
+            rightBoundary = new Boundary(BoundaryType.CURRENT_ROW, null);
             return;
         }
 
-        if (rightBoundary_.getType().isOffset()) {
-            checkOffsetExpr(analyzer, rightBoundary_);
+        if (rightBoundary.getType().isOffset()) {
+            checkOffsetExpr(analyzer, rightBoundary);
         }
 
-        if (leftBoundary_.getType() == BoundaryType.FOLLOWING) {
-            if (rightBoundary_.getType() != BoundaryType.FOLLOWING
-                    && rightBoundary_.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
+        if (leftBoundary.getType() == BoundaryType.FOLLOWING) {
+            if (rightBoundary.getType() != BoundaryType.FOLLOWING
+                    && rightBoundary.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
                 throw new AnalysisException(
                         "A lower window bound of " + BoundaryType.FOLLOWING.toString()
                         + " requires that the upper bound also be "
                         + BoundaryType.FOLLOWING.toString());
             }
 
-            if (rightBoundary_.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
-                checkOffsetBoundaries(analyzer, leftBoundary_, rightBoundary_);
+            if (rightBoundary.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
+                checkOffsetBoundaries(analyzer, leftBoundary, rightBoundary);
             }
         }
 
-        if (rightBoundary_.getType() == BoundaryType.PRECEDING) {
-            if (leftBoundary_.getType() != BoundaryType.PRECEDING
-                    && leftBoundary_.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
+        if (rightBoundary.getType() == BoundaryType.PRECEDING) {
+            if (leftBoundary.getType() != BoundaryType.PRECEDING
+                    && leftBoundary.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
                 throw new AnalysisException(
                         "An upper window bound of " + BoundaryType.PRECEDING.toString()
                         + " requires that the lower bound also be "
                         + BoundaryType.PRECEDING.toString());
             }
 
-            if (leftBoundary_.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
-                checkOffsetBoundaries(analyzer, rightBoundary_, leftBoundary_);
+            if (leftBoundary.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
+                checkOffsetBoundaries(analyzer, rightBoundary, leftBoundary);
             }
         }
     }
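
AnalyticWindow.analyze() enforces the frame rules visible in the hunks above, for example that a lower bound of FOLLOWING requires an upper bound of FOLLOWING or UNBOUNDED FOLLOWING, and symmetrically for PRECEDING upper bounds. A rough sketch of those two checks in isolation; the enum and method are made up, not the Doris API:

    // Illustration only: two of the boundary checks described above, in isolation.
    public class WindowBoundsCheckSketch {
        enum BoundaryType { UNBOUNDED_PRECEDING, PRECEDING, CURRENT_ROW, FOLLOWING, UNBOUNDED_FOLLOWING }

        static void check(BoundaryType lower, BoundaryType upper) {
            if (lower == BoundaryType.FOLLOWING
                    && upper != BoundaryType.FOLLOWING
                    && upper != BoundaryType.UNBOUNDED_FOLLOWING) {
                throw new IllegalArgumentException(
                        "A lower bound of FOLLOWING requires an upper bound of FOLLOWING");
            }
            if (upper == BoundaryType.PRECEDING
                    && lower != BoundaryType.PRECEDING
                    && lower != BoundaryType.UNBOUNDED_PRECEDING) {
                throw new IllegalArgumentException(
                        "An upper bound of PRECEDING requires a lower bound of PRECEDING");
            }
        }

        public static void main(String[] args) {
            check(BoundaryType.UNBOUNDED_PRECEDING, BoundaryType.CURRENT_ROW); // accepted
            try {
                check(BoundaryType.FOLLOWING, BoundaryType.CURRENT_ROW);       // rejected
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage());
            }
        }
    }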
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
index a47e8a9b5e..0232f1178f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
@@ -102,7 +102,7 @@ public class Analyzer {
     // NOTE: Alias of table is case sensitive
     // UniqueAlias is used to check whether the table ref or the alias is unique
     // tables/views use db.table, inline views use their alias
-    private final Set<String> uniqueTableAliasSet_ = Sets.newHashSet();
+    private final Set<String> uniqueTableAliasSet = Sets.newHashSet();
     private final Multimap<String, TupleDescriptor> tupleByAlias = ArrayListMultimap.create();
 
     // NOTE: Alias of column is case-insensitive
@@ -135,7 +135,7 @@ public class Analyzer {
     private String schemaTable; // table used in DESCRIBE Table
 
     // True if the corresponding select block has a limit and/or offset clause.
-    private boolean hasLimitOffsetClause_ = false;
+    private boolean hasLimitOffsetClause = false;
 
     // Current depth of nested analyze() calls. Used for enforcing a
     // maximum expr-tree depth. Needs to be manually maintained by the user
@@ -146,7 +146,7 @@ public class Analyzer {
     private boolean isSubquery = false;
 
     // Flag indicating whether this analyzer belongs to a WITH clause view.
-    private boolean isWithClause_ = false;
+    private boolean isWithClause = false;
 
     // By default, all registered semi-joined tuples are invisible, i.e., their slots
     // cannot be referenced. If set, this semi-joined tuple is made visible. Such a tuple
@@ -154,7 +154,7 @@ public class Analyzer {
     // In particular, if there are multiple semi-joins in the same query block, then the
     // On-clause of any such semi-join is not allowed to reference other semi-joined tuples
     // except its own. Therefore, only a single semi-joined tuple can be visible at a time.
-    private TupleId visibleSemiJoinedTupleId_ = null;
+    private TupleId visibleSemiJoinedTupleId = null;
     // for some situation that udf is not allowed.
     private boolean isUDFAllowed = true;
     // timezone specified for some operation, such as broker load
@@ -169,8 +169,8 @@ public class Analyzer {
     }
     public boolean setHasPlanHints() { return globalState.hasPlanHints = true; }
     public boolean hasPlanHints() { return globalState.hasPlanHints; }
-    public void setIsWithClause() { isWithClause_ = true; }
-    public boolean isWithClause() { return isWithClause_; }
+    public void setIsWithClause() { isWithClause = true; }
+    public boolean isWithClause() { return isWithClause; }
 
     public void setUDFAllowed(boolean val) { this.isUDFAllowed = val; }
     public boolean isUDFAllowed() { return this.isUDFAllowed; }
@@ -294,7 +294,7 @@ public class Analyzer {
         public final Map<SlotId, Analyzer> blockBySlot = Maps.newHashMap();
 
         // Expr rewriter for normalizing and rewriting expressions.
-        private final ExprRewriter exprRewriter_;
+        private final ExprRewriter exprRewriter;
 
         private final ExprRewriter mvExprRewriter;
 
@@ -322,7 +322,7 @@ public class Analyzer {
             List<ExprRewriteRule> onceRules = Lists.newArrayList();
             onceRules.add(ExtractCommonFactorsRule.INSTANCE);
             onceRules.add(InferFiltersRule.INSTANCE);
-            exprRewriter_ = new ExprRewriter(rules, onceRules);
+            exprRewriter = new ExprRewriter(rules, onceRules);
             // init mv rewriter
             List<ExprRewriteRule> mvRewriteRules = Lists.newArrayList();
             mvRewriteRules.add(ToBitmapToSlotRefRule.INSTANCE);
@@ -362,29 +362,29 @@ public class Analyzer {
     private final ArrayList<Analyzer> ancestors;
 
     // map from lowercase table alias to a view definition in this analyzer's scope
-    private final Map<String, View> localViews_ = Maps.newHashMap();
+    private final Map<String, View> localViews = Maps.newHashMap();
 
     // Map from lowercase table alias to descriptor. Tables without an explicit alias
     // are assigned two implicit aliases: the unqualified and fully-qualified table name.
     // Such tables have two entries pointing to the same descriptor. If an alias is
     // ambiguous, then this map retains the first entry with that alias to simplify error
     // checking (duplicate vs. ambiguous alias).
-    private final Map<String, TupleDescriptor> aliasMap_ = Maps.newHashMap();
+    private final Map<String, TupleDescriptor> aliasMap = Maps.newHashMap();
 
     // Map from tuple id to its corresponding table ref.
-    private final Map<TupleId, TableRef> tableRefMap_ = Maps.newHashMap();
+    private final Map<TupleId, TableRef> tableRefMap = Maps.newHashMap();
 
     // Set of lowercase ambiguous implicit table aliases.
-    private final Set<String> ambiguousAliases_ = Sets.newHashSet();
+    private final Set<String> ambiguousAliases = Sets.newHashSet();
 
     // Indicates whether this analyzer/block is guaranteed to have an empty result set
     // due to a limit 0 or constant conjunct evaluating to false.
-    private boolean hasEmptyResultSet_ = false;
+    private boolean hasEmptyResultSet = false;
 
     // Indicates whether the select-project-join (spj) portion of this query block
     // is guaranteed to return an empty result set. Set due to a constant non-Having
     // conjunct evaluating to false.
-    private boolean hasEmptySpjResultSet_ = false;
+    private boolean hasEmptySpjResultSet = false;
 
     public Analyzer(Catalog catalog, ConnectContext context) {
         ancestors = Lists.newArrayList();
@@ -454,7 +454,7 @@ public class Analyzer {
                         "labels must be smaller or equal to the number of returned columns.");
             }
         }
-        if (localViews_.put(view.getName(), view) != null) {
+        if (localViews.put(view.getName(), view) != null) {
             throw new AnalysisException(
                     String.format("Duplicate table alias: '%s'", view.getName()));
         }
@@ -498,10 +498,10 @@ public class Analyzer {
      */
     public TupleDescriptor registerTableRef(TableRef ref) throws AnalysisException {
         String uniqueAlias = ref.getUniqueAlias();
-        if (uniqueTableAliasSet_.contains(uniqueAlias)) {
+        if (uniqueTableAliasSet.contains(uniqueAlias)) {
             ErrorReport.reportAnalysisException(ErrorCode.ERR_NONUNIQ_TABLE, uniqueAlias);
         }
-        uniqueTableAliasSet_.add(uniqueAlias);
+        uniqueTableAliasSet.add(uniqueAlias);
 
         // If ref has no explicit alias, then the unqualified and the fully-qualified table
         // names are legal implicit aliases. Column references against unqualified implicit
@@ -510,12 +510,12 @@ public class Analyzer {
         String[] aliases = ref.getAliases();
         if (aliases.length > 1) {
             unqualifiedAlias = aliases[1];
-            TupleDescriptor tupleDesc = aliasMap_.get(unqualifiedAlias);
+            TupleDescriptor tupleDesc = aliasMap.get(unqualifiedAlias);
             if (tupleDesc != null) {
                 if (tupleDesc.hasExplicitAlias()) {
                     ErrorReport.reportAnalysisException(ErrorCode.ERR_NONUNIQ_TABLE, uniqueAlias);
                 } else {
-                    ambiguousAliases_.add(unqualifiedAlias);
+                    ambiguousAliases.add(unqualifiedAlias);
                 }
             }
         }
@@ -532,7 +532,7 @@ public class Analyzer {
             tupleByAlias.put(alias, result);
         }
 
-        tableRefMap_.put(result.getId(), ref);
+        tableRefMap.put(result.getId(), ref);
 
         return result;
     }
@@ -553,11 +553,11 @@ public class Analyzer {
             slot.setIsMaterialized(true);
             slot.setColumn(col);
             slot.setIsNullable(col.isAllowNull());
-            String key = tableRef.aliases_[0] + "." + col.getName();
+            String key = tableRef.aliases[0] + "." + col.getName();
             slotRefMap.put(key, slot);
         }
         globalState.descTbl.computeStatAndMemLayout();
-        tableRefMap_.put(result.getId(), ref);
+        tableRefMap.put(result.getId(), ref);
         for (String alias : tableRef.getAliases()) {
             tupleByAlias.put(alias, result);
         }
@@ -565,7 +565,7 @@ public class Analyzer {
     }
 
     public List<TupleId> getAllTupleIds() {
-        return new ArrayList<>(tableRefMap_.keySet());
+        return new ArrayList<>(tableRefMap.keySet());
     }
 
     /**
@@ -581,7 +581,9 @@ public class Analyzer {
      */
     public TableRef resolveTableRef(TableRef tableRef) throws AnalysisException {
         // Return the table if it is already resolved.
-        if (tableRef.isResolved()) return tableRef;
+        if (tableRef.isResolved()) {
+            return tableRef;
+        }
         // Try to find a matching local view.
         TableName tableName = tableRef.getName();
         if (!tableName.isFullyQualified()) {
@@ -590,8 +592,10 @@ public class Analyzer {
             String viewAlias = tableName.getTbl();
             Analyzer analyzer = this;
             do {
-                View localView = analyzer.localViews_.get(viewAlias);
-                if (localView != null) return new InlineViewRef(localView, tableRef);
+                View localView = analyzer.localViews.get(viewAlias);
+                if (localView != null) {
+                    return new InlineViewRef(localView, tableRef);
+                }
                 analyzer = (analyzer.ancestors.isEmpty() ? null : analyzer.ancestors.get(0));
             } while (analyzer != null);
         }
@@ -647,7 +651,7 @@ public class Analyzer {
     }
 
     public ExprRewriter getExprRewriter() {
-        return globalState.exprRewriter_;
+        return globalState.exprRewriter;
     }
 
     public ExprRewriter getMVExprRewriter() {
@@ -853,7 +857,9 @@ public class Analyzer {
         List<TupleId> tids = Lists.newArrayList();
         e.getIds(tids, null);
         for (TupleId tid: tids) {
-            if (!globalState.fullOuterJoinedTupleIds.containsKey(tid)) continue;
+            if (!globalState.fullOuterJoinedTupleIds.containsKey(tid)) {
+                continue;
+            }
             TableRef currentOuterJoin = globalState.fullOuterJoinedTupleIds.get(tid);
             globalState.fullOuterJoinedConjuncts.put(e.getId(), currentOuterJoin);
             break;
@@ -1170,7 +1176,9 @@ public class Analyzer {
     public List<Expr> getUnassignedConjuncts(List<TupleId> tupleIds) {
         List<Expr> result = Lists.newArrayList();
         for (Expr e : getUnassignedConjuncts(tupleIds, true)) {
-            if (canEvalPredicate(tupleIds, e)) result.add(e);
+            if (canEvalPredicate(tupleIds, e)) {
+                result.add(e);
+            }
         }
         return result;
     }
@@ -1280,7 +1288,9 @@ public class Analyzer {
     public boolean evalAfterJoin(Expr e) {
         List<TupleId> tids = Lists.newArrayList();
         e.getIds(tids, null);
-        if (tids.isEmpty()) return false;
+        if (tids.isEmpty()) {
+            return false;
+        }
         if (tids.size() > 1 || isOjConjunct(e) || isFullOuterJoined(e)
                 || (isOuterJoined(tids.get(0))
                 && (!e.isOnClauseConjunct() || isIjConjunct(e)))
@@ -1337,7 +1347,9 @@ public class Analyzer {
 
     public TableRef getAntiJoinRef(Expr e) {
         TableRef tblRef = globalState.sjClauseByConjunct.get(e.getId());
-        if (tblRef == null) return null;
+        if (tblRef == null) {
+            return null;
+        }
         return (tblRef.getJoinOp().isAntiJoin()) ? tblRef : null;
     }
 
@@ -1354,12 +1366,14 @@ public class Analyzer {
     }
 
     public boolean isVisible(TupleId tid) {
-        return tid == visibleSemiJoinedTupleId_ || !isSemiJoined(tid);
+        return tid == visibleSemiJoinedTupleId || !isSemiJoined(tid);
     }
 
     public boolean containsOuterJoinedTid(List<TupleId> tids) {
         for (TupleId tid: tids) {
-            if (isOuterJoined(tid)) return true;
+            if (isOuterJoined(tid)) {
+                return true;
+            }
         }
         return false;
     }
@@ -1373,7 +1387,7 @@ public class Analyzer {
     }
 
     public Set<String> getAliases() {
-        return uniqueTableAliasSet_;
+        return uniqueTableAliasSet;
     }
 
     public List<Expr> getAllConjuncts(TupleId id) {
@@ -1455,8 +1469,8 @@ public class Analyzer {
     public void setVisibleSemiJoinedTuple(TupleId tid) {
         Preconditions.checkState(tid == null
                 || globalState.semiJoinedTupleIds.containsKey(tid));
-        Preconditions.checkState(tid == null || visibleSemiJoinedTupleId_ == null);
-        visibleSemiJoinedTupleId_ = tid;
+        Preconditions.checkState(tid == null || visibleSemiJoinedTupleId == null);
+        visibleSemiJoinedTupleId = tid;
     }
 
     /**
@@ -1475,13 +1489,13 @@ public class Analyzer {
      * to return an empty result set, e.g., due to a limit 0 or a constant predicate
      * that evaluates to false.
      */
-    public boolean hasEmptyResultSet() { return hasEmptyResultSet_; }
-    public void setHasEmptyResultSet() { hasEmptyResultSet_ = true; }
+    public boolean hasEmptyResultSet() { return hasEmptyResultSet; }
+    public void setHasEmptyResultSet() { hasEmptyResultSet = true; }
 
-    public boolean hasEmptySpjResultSet() { return hasEmptySpjResultSet_; }
+    public boolean hasEmptySpjResultSet() { return hasEmptySpjResultSet; }
 
     public void setHasLimitOffsetClause(boolean hasLimitOffset) {
-        this.hasLimitOffsetClause_ = hasLimitOffset;
+        this.hasLimitOffsetClause = hasLimitOffset;
     }
 
     /**
@@ -1530,9 +1544,11 @@ public class Analyzer {
      */
     private void markConstantConjunct(Expr conjunct, boolean fromHavingClause)
             throws AnalysisException {
-        if (!conjunct.isConstant() || isOjConjunct(conjunct)) return;
-        if ((!fromHavingClause && !hasEmptySpjResultSet_)
-                || (fromHavingClause && !hasEmptyResultSet_)) {
+        if (!conjunct.isConstant() || isOjConjunct(conjunct)) {
+            return;
+        }
+        if ((!fromHavingClause && !hasEmptySpjResultSet)
+                || (fromHavingClause && !hasEmptyResultSet)) {
             try {
                 if (conjunct instanceof BetweenPredicate) {
                     // Rewrite the BetweenPredicate into a CompoundPredicate so we can evaluate it
@@ -1549,18 +1565,18 @@ public class Analyzer {
                     final BoolLiteral value = (BoolLiteral) newConjunct;
                     if (!value.getValue()) {
                         if (fromHavingClause) {
-                            hasEmptyResultSet_ = true;
+                            hasEmptyResultSet = true;
                         } else {
-                            hasEmptySpjResultSet_ = true;
+                            hasEmptySpjResultSet = true;
                         }
                     }
                     markConjunctAssigned(conjunct);
                 }
                 if (newConjunct instanceof NullLiteral) {
                     if (fromHavingClause) {
-                        hasEmptyResultSet_ = true;
+                        hasEmptyResultSet = true;
                     } else {
-                        hasEmptySpjResultSet_ = true;
+                        hasEmptySpjResultSet = true;
                     }
                     markConjunctAssigned(conjunct);
                 }
@@ -1600,7 +1616,9 @@ public class Analyzer {
      */
     public boolean canEvalOuterJoinedConjunct(Expr e, List<TupleId> tids) {
         TableRef outerJoin = getOjRef(e);
-        if (outerJoin == null) return true;
+        if (outerJoin == null) {
+            return true;
+        }
         return tids.containsAll(outerJoin.getAllTableRefIds());
     }
 
@@ -1618,9 +1636,13 @@ public class Analyzer {
         List<ExprId> conjunctIds = Lists.newArrayList();
         for (TupleId rhsId: rhsTblRefIds) {
             List<ExprId> cids = globalState.eqJoinConjuncts.get(rhsId);
-            if (cids == null) continue;
+            if (cids == null) {
+                continue;
+            }
             for (ExprId eid: cids) {
-                if (!conjunctIds.contains(eid)) conjunctIds.add(eid);
+                if (!conjunctIds.contains(eid)) {
+                    conjunctIds.add(eid);
+                }
             }
         }
 
@@ -1645,7 +1667,9 @@ public class Analyzer {
                 continue;
             }
 
-            if (ojClauseConjuncts != null && !ojClauseConjuncts.contains(conjunctId)) continue;
+            if (ojClauseConjuncts != null && !ojClauseConjuncts.contains(conjunctId)) {
+                continue;
+            }
             result.add(e);
         }
         return result;
@@ -1799,7 +1823,9 @@ public class Analyzer {
      */
     public void castToSetOpsCompatibleTypes(List<List<Expr>> exprLists)
             throws AnalysisException {
-        if (exprLists == null || exprLists.size() < 2) return;
+        if (exprLists == null || exprLists.size() < 2) {
+            return;
+        }
 
         // Determine compatible types for exprs, position by position.
         List<Expr> firstList = exprLists.get(0);
@@ -1982,9 +2008,13 @@ public class Analyzer {
 
         if (e.isOnClauseConjunct()) {
 
-            if (isAntiJoinedConjunct(e)) return canEvalAntiJoinedConjunct(e, tupleIds);
+            if (isAntiJoinedConjunct(e)) {
+                return canEvalAntiJoinedConjunct(e, tupleIds);
+            }
             if (isIjConjunct(e) || isSjConjunct(e)) {
-                if (!containsOuterJoinedTid(tids)) return true;
+                if (!containsOuterJoinedTid(tids)) {
+                    return true;
+                }
                 // If the predicate references an outer-joined tuple, then evaluate it at
                 // the join that the On-clause belongs to.
                 TableRef onClauseTableRef = null;
@@ -1997,11 +2027,15 @@ public class Analyzer {
                 return tupleIds.containsAll(onClauseTableRef.getAllTableRefIds());
             }
 
-            if (isFullOuterJoined(e)) return canEvalFullOuterJoinedConjunct(e, tupleIds);
+            if (isFullOuterJoined(e)) {
+                return canEvalFullOuterJoinedConjunct(e, tupleIds);
+            }
             if (isOjConjunct(e)) {
                 // Force this predicate to be evaluated by the corresponding outer join node.
                 // The join node will pick up the predicate later via getUnassignedOjConjuncts().
-                if (tids.size() > 1) return false;
+                if (tids.size() > 1) {
+                    return false;
+                }
                 // Optimization for single-tid predicates: Legal to assign below the outer join
                 // if the predicate is from the same On-clause that makes tid nullable
                 // (otherwise e needn't be true when that tuple is set).
@@ -2031,7 +2065,9 @@ public class Analyzer {
      */
     public boolean canEvalAntiJoinedConjunct(Expr e, List<TupleId> nodeTupleIds) {
         TableRef antiJoinRef = getAntiJoinRef(e);
-        if (antiJoinRef == null) return true;
+        if (antiJoinRef == null) {
+            return true;
+        }
         List<TupleId> tids = Lists.newArrayList();
         e.getIds(tids, null);
         if (tids.size() > 1) {
@@ -2049,7 +2085,9 @@ public class Analyzer {
      */
     public boolean canEvalFullOuterJoinedConjunct(Expr e, List<TupleId> tids) {
         TableRef fullOuterJoin = getFullOuterJoinRef(e);
-        if (fullOuterJoin == null) return true;
+        if (fullOuterJoin == null) {
+            return true;
+        }
         return tids.containsAll(fullOuterJoin.getAllTableRefIds());
     }
 
@@ -2100,7 +2138,7 @@ public class Analyzer {
         }
     }
 
-    public Map<String, View> getLocalViews() { return localViews_; }
+    public Map<String, View> getLocalViews() { return localViews; }
 
     public boolean isOuterJoined(TupleId tid) {
         return globalState.outerJoinedTupleIds.containsKey(tid);
@@ -2187,7 +2225,9 @@ public class Analyzer {
     public boolean hasOuterJoinedValueTransferTarget(List<SlotId> sids) {
         for (SlotId srcSid : sids) {
             for (SlotId dstSid : getValueTransferTargets(srcSid)) {
-                if (isOuterJoined(getTupleId(dstSid))) return true;
+                if (isOuterJoined(getTupleId(dstSid))) {
+                    return true;
+                }
             }
         }
         return false;
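
The registerTableRef() hunks above are pure renames (uniqueTableAliasSet, aliasMap, ambiguousAliases, tableRefMap), but the rule they implement is easy to miss: a repeated unique alias is a hard duplicate-alias error, whereas two tables that merely share the same unqualified implicit alias make that alias ambiguous (the first mapping is kept, as the comment on aliasMap says). A simplified standalone sketch of that split, with made-up types that are not the Doris API (it also folds away the explicit-vs-implicit alias distinction the real code checks):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class AliasRegistry {
        private final Set<String> uniqueAliases = new HashSet<>();        // one unique alias per table ref
        private final Map<String, String> aliasToTable = new HashMap<>(); // alias -> first table registered
        private final Set<String> ambiguousAliases = new HashSet<>();

        void register(String uniqueAlias, String implicitAlias, String table) {
            if (!uniqueAliases.add(uniqueAlias)) {
                throw new IllegalStateException("Not unique table/alias: " + uniqueAlias);
            }
            if (implicitAlias != null && aliasToTable.putIfAbsent(implicitAlias, table) != null) {
                // Another table already claimed this unqualified name: keep the first
                // mapping and remember the alias as ambiguous instead of failing.
                ambiguousAliases.add(implicitAlias);
            }
            aliasToTable.putIfAbsent(uniqueAlias, table);
        }
    }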
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ArithmeticExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ArithmeticExpr.java
index 361d8c4cf6..bd1ac2d0b7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ArithmeticExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ArithmeticExpr.java
@@ -361,6 +361,7 @@ public class ArithmeticExpr extends Expr {
                     if (isConstant()) {
                         castUpperInteger(t1, t2);
                     }
+                    break;
                 case MOD:
                     if (t1.isDecimalV2() || t2.isDecimalV2()) {
                         castBinaryOp(findCommonType(t1, t2));
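
The added break above (and the matching one in ColumnDef.java later in this patch) stops the preceding case from silently falling through into the next one, which is why the checkstyle rules flag unterminated cases. A tiny self-contained illustration of the failure mode, with made-up names:

    public class FallThroughDemo {
        static String describe(int kind) {
            StringBuilder sb = new StringBuilder();
            switch (kind) {
                case 0:
                    sb.append("float");
                    break; // without this break, execution falls through and also appends "double"
                case 1:
                    sb.append("double");
                    break;
                default:
                    sb.append("other");
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            System.out.println(describe(0)); // "float"; would be "floatdouble" without the break
        }
    }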
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BaseTableRef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BaseTableRef.java
index 09269f37f8..39f722a92a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BaseTableRef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BaseTableRef.java
@@ -37,8 +37,10 @@ public class BaseTableRef extends TableRef {
         this.table = table;
         this.name = tableName;
         // Set implicit aliases if no explicit one was given.
-        if (hasExplicitAlias()) return;
-        aliases_ = new String[] { name.toString(), tableName.getNoClusterString(), tableName.getTbl() };
+        if (hasExplicitAlias()) {
+            return;
+        }
+        aliases = new String[] { name.toString(), tableName.getNoClusterString(), tableName.getTbl() };
     }
 
     protected BaseTableRef(BaseTableRef other) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BetweenPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BetweenPredicate.java
index d3787c26bb..5dc2e4c5a9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BetweenPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BetweenPredicate.java
@@ -110,6 +110,21 @@ public class BetweenPredicate extends Predicate {
     @Override
     public Expr clone(ExprSubstitutionMap sMap) { return new BetweenPredicate(this); }
 
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        if (!super.equals(o)) {
+            return false;
+        }
+        BetweenPredicate that = (BetweenPredicate) o;
+        return isNotBetween == that.isNotBetween;
+    }
+
     @Override
     public int hashCode() {
         return 31 * super.hashCode() + Boolean.hashCode(isNotBetween);
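
This hunk, together with the BoolLiteral, CaseExpr and CastExpr changes below, pairs equals() with hashCode() so the two stay consistent, which hash-based collections depend on. A small standalone reminder of the contract, using a hypothetical value class rather than the real predicate:

    import java.util.HashSet;
    import java.util.Objects;
    import java.util.Set;

    final class Flag {
        private final boolean notBetween;

        Flag(boolean notBetween) {
            this.notBetween = notBetween;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            return notBetween == ((Flag) o).notBetween;
        }

        // Must agree with equals(): equal objects need equal hash codes,
        // otherwise HashSet/HashMap can hold logical duplicates.
        @Override
        public int hashCode() {
            return Objects.hash(notBetween);
        }

        public static void main(String[] args) {
            Set<Flag> set = new HashSet<>();
            set.add(new Flag(true));
            set.add(new Flag(true));
            System.out.println(set.size()); // 1, because equals() and hashCode() agree
        }
    }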
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BinaryPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BinaryPredicate.java
index cc1c4cc29c..271c8d2260 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BinaryPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BinaryPredicate.java
@@ -52,7 +52,7 @@ public class BinaryPredicate extends Predicate implements Writable {
     private final static Logger LOG = LogManager.getLogger(BinaryPredicate.class);
 
     // true if this BinaryPredicate is inferred from slot equivalences, false otherwise.
-    private boolean isInferred_ = false;
+    private boolean isInferred = false;
 
     public enum Operator {
         EQ("=", "eq", TExprOpcode.EQ),
@@ -104,8 +104,9 @@ public class BinaryPredicate extends Predicate implements Writable {
                     return LE;
                 case EQ_FOR_NULL:
                     return this;
+                default:
+                    return null;
             }
-            return null;
         }
 
         public Operator converse() {
@@ -163,15 +164,17 @@ public class BinaryPredicate extends Predicate implements Writable {
         super(other);
         op = other.op;
         slotIsleft= other.slotIsleft;
-        isInferred_ = other.isInferred_;
+        isInferred = other.isInferred;
     }
 
-    public boolean isInferred() { return isInferred_; }
-    public void setIsInferred() { isInferred_ = true; }
+    public boolean isInferred() { return isInferred; }
+    public void setIsInferred() { isInferred = true; }
 
     public static void initBuiltins(FunctionSet functionSet) {
         for (Type t: Type.getSupportedTypes()) {
-            if (t.isNull()) continue; // NULL is handled through type promotion.
+            if (t.isNull()) {
+                continue; // NULL is handled through type promotion.
+            }
             functionSet.addBuiltinBothScalaAndVectorized(ScalarFunction.createBuiltinOperator(
                     Operator.EQ.getName(), Lists.newArrayList(t, t), Type.BOOLEAN));
             functionSet.addBuiltinBothScalaAndVectorized(ScalarFunction.createBuiltinOperator(
@@ -455,7 +458,9 @@ public class BinaryPredicate extends Predicate implements Writable {
      * casts, returns those two slots; otherwise returns null.
      */
     public static Pair<SlotId, SlotId> getEqSlots(Expr e) {
-        if (!(e instanceof BinaryPredicate)) return null;
+        if (!(e instanceof BinaryPredicate)) {
+            return null;
+        }
         return ((BinaryPredicate) e).getEqSlots();
     }
 
@@ -465,11 +470,17 @@ public class BinaryPredicate extends Predicate implements Writable {
      */
     @Override
     public Pair<SlotId, SlotId> getEqSlots() {
-        if (op != Operator.EQ) return null;
+        if (op != Operator.EQ) {
+            return null;
+        }
         SlotRef lhs = getChild(0).unwrapSlotRef(true);
-        if (lhs == null) return null;
+        if (lhs == null) {
+            return null;
+        }
         SlotRef rhs = getChild(1).unwrapSlotRef(true);
-        if (rhs == null) return null;
+        if (rhs == null) {
+            return null;
+        }
         return new Pair<SlotId, SlotId>(lhs.getSlotId(), rhs.getSlotId());
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BoolLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BoolLiteral.java
index 4a6ef66c0f..e434668286 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BoolLiteral.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BoolLiteral.java
@@ -152,4 +152,19 @@ public class BoolLiteral extends LiteralExpr {
     public int hashCode() {
         return 31 * super.hashCode() + Boolean.hashCode(value);
     }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        if (!super.equals(o)) {
+            return false;
+        }
+        BoolLiteral that = (BoolLiteral) o;
+        return value == that.value;
+    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BuiltinAggregateFunction.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BuiltinAggregateFunction.java
index bef35e2b06..ff66fb29e4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BuiltinAggregateFunction.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BuiltinAggregateFunction.java
@@ -34,7 +34,7 @@ import java.util.ArrayList;
  * Internal representation of a builtin aggregate function.
  */
 public class BuiltinAggregateFunction extends Function {
-    private final Operator                          op_;
+    private final Operator op;
     // this is to judge the analytic function
     private boolean isAnalyticFn = false;
 
@@ -42,7 +42,7 @@ public class BuiltinAggregateFunction extends Function {
         return isAnalyticFn;
     }
     // TODO: this is not used yet until the planner understand this.
-    private org.apache.doris.catalog.Type intermediateType_;
+    private org.apache.doris.catalog.Type intermediateType;
     private boolean reqIntermediateTuple = false;
 
     public boolean isReqIntermediateTuple() {
@@ -58,8 +58,8 @@ public class BuiltinAggregateFunction extends Function {
         Preconditions.checkState(op != null);
         // may be no need to analyze
         // intermediateType.analyze();
-        op_ = op;
-        intermediateType_ = intermediateType;
+        this.op = op;
+        this.intermediateType = intermediateType;
         if (isAnalyticFn && !intermediateType.equals(retType)) {
             reqIntermediateTuple = true;
         }
@@ -71,25 +71,25 @@ public class BuiltinAggregateFunction extends Function {
     public TFunction toThrift() {
         TFunction fn = super.toThrift();
         // TODO: for now, just put the op_ enum as the id.
-        if (op_ == BuiltinAggregateFunction.Operator.FIRST_VALUE_REWRITE) {
+        if (op == BuiltinAggregateFunction.Operator.FIRST_VALUE_REWRITE) {
             fn.setId(0);
         } else {
-            fn.setId(op_.thriftOp.ordinal());
+            fn.setId(op.thriftOp.ordinal());
         }
-        fn.setAggregateFn(new TAggregateFunction(intermediateType_.toThrift()));
+        fn.setAggregateFn(new TAggregateFunction(intermediateType.toThrift()));
         return fn;
     }
 
     public Operator op() {
-        return op_;
+        return op;
     }
 
     public org.apache.doris.catalog.Type getIntermediateType() {
-        return intermediateType_;
+        return intermediateType;
     }
 
     public void setIntermediateType(org.apache.doris.catalog.Type t) {
-        intermediateType_ = t;
+        intermediateType = t;
     }
 
     // TODO: this is effectively a catalog of builtin aggregate functions.
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CaseExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CaseExpr.java
index b4daa214ee..787d658552 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CaseExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CaseExpr.java
@@ -32,6 +32,7 @@ import com.google.common.collect.Lists;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;
 
 /**
  * CASE and DECODE are represented using this class. The backend implementation is
@@ -98,6 +99,11 @@ public class CaseExpr extends Expr {
         return new CaseExpr(this);
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), hasCaseExpr, hasElseExpr);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (!super.equals(obj)) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CastExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CastExpr.java
index cca6217816..d2e9691bf2 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CastExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CastExpr.java
@@ -313,6 +313,11 @@ public class CastExpr extends Expr {
         analyze();
     }
 
+    @Override
+    public int hashCode() {
+        return super.hashCode();
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (!super.equals(obj)) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
index 29e3aed56d..86a818ff8e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
@@ -280,6 +280,7 @@ public class ColumnDef {
                 if (floatLiteral.getType().equals(Type.DOUBLE)) {
                     throw new AnalysisException("Default value will loose precision: " + defaultValue);
                 }
+                break;
             case DOUBLE:
                 FloatLiteral doubleLiteral = new FloatLiteral(defaultValue);
                 break;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CompoundPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CompoundPredicate.java
index 8b454ce533..87b0dfd883 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CompoundPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CompoundPredicate.java
@@ -145,6 +145,8 @@ public class CompoundPredicate extends Predicate {
             case NOT:
                 selectivity = 1.0 - getChild(0).selectivity;
                 break;
+            default:
+                throw new AnalysisException("not support operator: " + op);
         }
         selectivity = Math.max(0.0, Math.min(1.0, selectivity));
         if (LOG.isDebugEnabled()) {
@@ -180,7 +182,9 @@ public class CompoundPredicate extends Predicate {
      */
     @Override
     public Expr negate() {
-        if (op == Operator.NOT) return getChild(0);
+        if (op == Operator.NOT) {
+            return getChild(0);
+        }
         Expr negatedLeft = getChild(0).negate();
         Expr negatedRight = getChild(1).negate();
         Operator newOp = (op == Operator.OR) ? Operator.AND : Operator.OR;
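
The negate() change above is only a brace fix, but the method it touches applies De Morgan's laws: a NOT node is stripped, and AND/OR are flipped with both children negated. A compact sketch of the same transformation over a toy expression type (illustrative only, not the Doris Expr hierarchy):

    abstract class BoolExpr {
        abstract BoolExpr negate();
    }

    class Leaf extends BoolExpr {
        final String name;
        final boolean negated;

        Leaf(String name, boolean negated) {
            this.name = name;
            this.negated = negated;
        }

        @Override
        BoolExpr negate() {
            return new Leaf(name, !negated);
        }

        @Override
        public String toString() {
            return (negated ? "NOT " : "") + name;
        }
    }

    class Compound extends BoolExpr {
        enum Op { AND, OR }

        final Op op;
        final BoolExpr left;
        final BoolExpr right;

        Compound(Op op, BoolExpr left, BoolExpr right) {
            this.op = op;
            this.left = left;
            this.right = right;
        }

        // De Morgan: NOT (a AND b) == (NOT a) OR (NOT b), and symmetrically for OR.
        @Override
        BoolExpr negate() {
            Op flipped = (op == Op.OR) ? Op.AND : Op.OR;
            return new Compound(flipped, left.negate(), right.negate());
        }

        @Override
        public String toString() {
            return "(" + left + " " + op + " " + right + ")";
        }
    }

Negating (a AND b) with this sketch yields (NOT a OR NOT b), which mirrors what the real negate() does for a CompoundPredicate.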
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java
index 09dd6b1f8d..75fcb05f2a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java
@@ -428,8 +428,8 @@ public class DateLiteral extends LiteralExpr {
     private long makePackedDatetime() {
         long ymd = ((year * 13 + month) << 5) | day;
         long hms = (hour << 12) | (minute << 6) | second;
-        long packed_datetime = ((ymd << 17) | hms) << 24 + microsecond;
-        return packed_datetime;
+        long packedDatetime = ((ymd << 17) | hms) << 24 + microsecond;
+        return packedDatetime;
     }
 
     @Override
@@ -446,9 +446,9 @@ public class DateLiteral extends LiteralExpr {
         out.writeLong(makePackedDatetime());
     }
 
-    private void fromPackedDatetime(long packed_time) {
-        microsecond = (packed_time % (1L << 24));
-        long ymdhms = (packed_time >> 24);
+    private void fromPackedDatetime(long packedTime) {
+        microsecond = (packedTime % (1L << 24));
+        long ymdhms = (packedTime >> 24);
         long ymd = ymdhms >> 17;
         long hms = ymdhms % (1 << 17);
 
@@ -467,11 +467,11 @@ public class DateLiteral extends LiteralExpr {
 
     public void readFields(DataInput in) throws IOException {
         super.readFields(in);
-        short date_literal_type = in.readShort();
+        short dateLiteralType = in.readShort();
         fromPackedDatetime(in.readLong());
-        if (date_literal_type == DateLiteralType.DATETIME.value()) {
+        if (dateLiteralType == DateLiteralType.DATETIME.value()) {
             this.type = Type.DATETIME;
-        } else if (date_literal_type == DateLiteralType.DATE.value()) {
+        } else if (dateLiteralType == DateLiteralType.DATE.value()) {
             this.type = Type.DATE;
         } else {
             throw new IOException("Error date literal type : " + type);
@@ -815,8 +815,7 @@ public class DateLiteral extends LiteralExpr {
                     case 'I':
                     case 'l':
                         usaTime = true;
-                        // Fall through
-                    case 'k':
+                    case 'k': // CHECKSTYLE IGNORE THIS LINE: Fall through
                     case 'H':
                         tmp = findNumber(value, vp, 2);
                         intValue = strToLong(value.substring(vp, tmp));
@@ -1024,12 +1023,12 @@ public class DateLiteral extends LiteralExpr {
             }
             long days = calcDaynr(strictWeekNumber ? strictWeekNumberYear : this.year, 1, 1);
 
-            long weekday_b = calcWeekday(days, sundayFirst);
+            long weekdayB = calcWeekday(days, sundayFirst);
 
             if (sundayFirst) {
-                days += ((weekday_b == 0) ? 0 : 7) - weekday_b + (weekNum - 1) * 7 + weekday % 7;
+                days += ((weekdayB == 0) ? 0 : 7) - weekdayB + (weekNum - 1) * 7 + weekday % 7;
             } else {
-                days += ((weekday_b <= 3) ? 0 : 7) - weekday_b + (weekNum - 1) * 7 + weekday - 1;
+                days += ((weekdayB <= 3) ? 0 : 7) - weekdayB + (weekNum - 1) * 7 + weekday - 1;
             }
             getDateFromDaynr(days);
         }
@@ -1194,12 +1193,12 @@ public class DateLiteral extends LiteralExpr {
         int fieldLen = yearLen;
         while (pre < dateStr.length() && Character.isDigit(dateStr.charAt(pre)) && fieldIdx < MAX_DATE_PARTS - 1) {
             int start = pre;
-            int temp_val = 0;
+            int tempVal = 0;
             boolean scanToDelim = (!isIntervalFormat) && (fieldIdx != 6);
             while (pre < dateStr.length() && Character.isDigit(dateStr.charAt(pre)) && (scanToDelim || fieldLen-- != 0)) {
-                temp_val = temp_val * 10 + (dateStr.charAt(pre++) - '0');
+                tempVal = tempVal * 10 + (dateStr.charAt(pre++) - '0');
             }
-            dateVal[fieldIdx] = temp_val;
+            dateVal[fieldIdx] = tempVal;
             dateLen[fieldIdx] = pre - start;
             fieldLen = 2;
 
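The renamed helpers above pack a datetime into one long. As fromPackedDatetime() reads it, the low 24 bits are the microsecond, the next 17 bits hold hour<<12 | minute<<6 | second, and the remaining high bits hold (year*13 + month)<<5 | day. A small decode sketch under that layout; the month/year extraction below is derived from the year*13+month encoding and is not literally part of the patch:

    class PackedDatetime {
        long year;
        long month;
        long day;
        long hour;
        long minute;
        long second;
        long microsecond;

        // Layout (as read by fromPackedDatetime above): high bits carry
        // (year*13+month)<<5 | day, then 17 bits of hour<<12 | minute<<6 | second,
        // then 24 bits of microsecond.
        void fromPacked(long packed) {
            microsecond = packed % (1L << 24);
            long ymdhms = packed >> 24;
            long ymd = ymdhms >> 17;
            long hms = ymdhms % (1 << 17);

            day = ymd % (1 << 5);
            long yearMonth = ymd >> 5; // year * 13 + month
            month = yearMonth % 13;
            year = yearMonth / 13;

            second = hms % (1 << 6);
            minute = (hms >> 6) % (1 << 6);
            hour = hms >> 12;
        }
    }
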
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DescriptorTable.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DescriptorTable.java
index 148c93fb91..e2f933291f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DescriptorTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DescriptorTable.java
@@ -47,27 +47,27 @@ public class DescriptorTable {
     // List of referenced tables with no associated TupleDescriptor to ship to the BE.
     // For example, the output table of an insert query.
     private final List<Table> referencedTables = new ArrayList<Table>();
-    private final IdGenerator<TupleId> tupleIdGenerator_ = TupleId.createGenerator();
-    private final IdGenerator<SlotId> slotIdGenerator_ = SlotId.createGenerator();
+    private final IdGenerator<TupleId> tupleIdGenerator = TupleId.createGenerator();
+    private final IdGenerator<SlotId> slotIdGenerator = SlotId.createGenerator();
     private final HashMap<SlotId, SlotDescriptor> slotDescs = Maps.newHashMap();
 
     public DescriptorTable() {
     }
 
     public TupleDescriptor createTupleDescriptor() {
-        TupleDescriptor d = new TupleDescriptor(tupleIdGenerator_.getNextId());
+        TupleDescriptor d = new TupleDescriptor(tupleIdGenerator.getNextId());
         tupleDescs.put(d.getId(), d);
         return d;
     }
 
     public TupleDescriptor createTupleDescriptor(String debugName) {
-        TupleDescriptor d = new TupleDescriptor(tupleIdGenerator_.getNextId(), debugName);
+        TupleDescriptor d = new TupleDescriptor(tupleIdGenerator.getNextId(), debugName);
         tupleDescs.put(d.getId(), d);
         return d;
     }
 
     public SlotDescriptor addSlotDescriptor(TupleDescriptor d) {
-        SlotDescriptor result = new SlotDescriptor(slotIdGenerator_.getNextId(), d);
+        SlotDescriptor result = new SlotDescriptor(slotIdGenerator.getNextId(), d);
         d.addSlot(result);
         slotDescs.put(result.getId(), result);
         return result;
@@ -78,7 +78,7 @@ public class DescriptorTable {
      * computed.
      */
     public TupleDescriptor copyTupleDescriptor(TupleId srcId, String debugName) {
-        TupleDescriptor d = new TupleDescriptor(tupleIdGenerator_.getNextId(), debugName);
+        TupleDescriptor d = new TupleDescriptor(tupleIdGenerator.getNextId(), debugName);
         tupleDescs.put(d.getId(), d);
         // create copies of slots
         TupleDescriptor src = tupleDescs.get(srcId);
@@ -93,7 +93,7 @@ public class DescriptorTable {
      * Append copy of src to dest.
      */
     public SlotDescriptor copySlotDescriptor(TupleDescriptor dest, SlotDescriptor src) {
-        SlotDescriptor result = new SlotDescriptor(slotIdGenerator_.getNextId(), dest, src);
+        SlotDescriptor result = new SlotDescriptor(slotIdGenerator.getNextId(), dest, src);
         dest.addSlot(result);
         slotDescs.put(result.getId(), result);
         return result;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java
index c125e742fc..8213063965 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java
@@ -240,7 +240,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
 
     protected Type type;  // result of analysis
 
-    protected boolean isOnClauseConjunct_; // set by analyzer
+    protected boolean isOnClauseConjunct; // set by analyzer
 
     protected boolean isAnalyzed = false;  // true after analyze() has been called
 
@@ -267,7 +267,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
     protected Function fn;
 
     // Cached value of IsConstant(), set during analyze() and valid if isAnalyzed_ is true.
-    private boolean isConstant_;
+    private boolean isConstant;
 
     // Flag to indicate whether to wrap this expr's toSql() in parenthesis. Set by parser.
     // Needed for properly capturing expr precedences in the SQL string.
@@ -292,7 +292,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
         numDistinctValues = other.numDistinctValues;
         opcode = other.opcode;
         outputScale = other.outputScale;
-        isConstant_ = other.isConstant_;
+        isConstant = other.isConstant;
         fn = other.fn;
         printSqlInParens = other.printSqlInParens;
         children = Expr.cloneList(other.children);
@@ -357,8 +357,8 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
         isFilter = v;
     }
 
-    public boolean isOnClauseConjunct() { return isOnClauseConjunct_; }
-    public void setIsOnClauseConjunct(boolean b) { isOnClauseConjunct_ = b; }
+    public boolean isOnClauseConjunct() { return isOnClauseConjunct; }
+    public void setIsOnClauseConjunct(boolean b) { isOnClauseConjunct = b; }
     public boolean isAuxExpr() { return isAuxExpr; }
     public void setIsAuxExpr() { isAuxExpr = true; }
     public Function getFn() {
@@ -378,7 +378,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      * Throws exception if any errors found.
      */
     public final void analyze(Analyzer analyzer) throws AnalysisException {
-        if (isAnalyzed()) return;
+        if (isAnalyzed()) {
+            return;
+        }
 
         // Check the expr child limit.
         if (children.size() > Config.expr_children_limit) {
@@ -401,7 +403,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
         for (Expr child: children) {
             child.analyze(analyzer);
         }
-        if (analyzer != null) analyzer.decrementCallDepth();
+        if (analyzer != null) {
+            analyzer.decrementCallDepth();
+        }
         computeNumDistinctValues();
 
         // Do all the analysis for the expr subclass before marking the Expr analyzed.
@@ -424,7 +428,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
         Preconditions.checkState(!isAnalyzed);
         // We need to compute the const-ness as the last step, since analysis may change
         // the result, e.g. by resolving function.
-        isConstant_ = isConstantImpl();
+        isConstant = isConstantImpl();
         isAnalyzed = true;
     }
 
@@ -691,10 +695,14 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
                               boolean preserveRootType) throws AnalysisException {
         Expr result = clone();
         // Return clone to avoid removing casts.
-        if (smap == null) return result;
+        if (smap == null) {
+            return result;
+        }
         result = result.substituteImpl(smap, analyzer);
         result.analyze(analyzer);
-        if (preserveRootType && !type.equals(result.getType())) result = result.castTo(type);
+        if (preserveRootType && !type.equals(result.getType())) {
+            result = result.castTo(type);
+        }
         return result;
     }
 
@@ -751,10 +759,14 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      */
     protected Expr substituteImpl(ExprSubstitutionMap smap, Analyzer analyzer)
             throws AnalysisException {
-        if (isImplicitCast()) return getChild(0).substituteImpl(smap, analyzer);
+        if (isImplicitCast()) {
+            return getChild(0).substituteImpl(smap, analyzer);
+        }
         if (smap != null) {
             Expr substExpr = smap.get(this);
-            if (substExpr != null) return substExpr.clone();
+            if (substExpr != null) {
+                return substExpr.clone();
+            }
         }
         for (int i = 0; i < children.size(); ++i) {
             children.set(i, children.get(i).substituteImpl(smap, analyzer));
@@ -762,7 +774,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
         // SlotRefs must remain analyzed to support substitution across query blocks. All
         // other exprs must be analyzed again after the substitution to add implicit casts
         // and for resolving their correct function signature.
-        if (!(this instanceof SlotRef)) resetAnalysisState();
+        if (!(this instanceof SlotRef)) {
+            resetAnalysisState();
+        }
         return this;
     }
 
@@ -824,7 +838,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      * the exprs have an invalid number of distinct values.
      */
     public static long getNumDistinctValues(List<Expr> exprs) {
-      if (exprs == null || exprs.isEmpty()) return 0;
+      if (exprs == null || exprs.isEmpty()) {
+          return 0;
+      }
       long numDistinctValues = 1;
       for (Expr expr: exprs) {
         if (expr.getNumDistinctValues() == -1) {
@@ -1012,7 +1028,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      * Resets the internal analysis state of this expr tree. Removes implicit casts.
      */
     public Expr reset() {
-      if (isImplicitCast()) return getChild(0).reset();
+      if (isImplicitCast()) {
+          return getChild(0).reset();
+      }
       for (int i = 0; i < children.size(); ++i) {
         children.set(i, children.get(i).reset());
       }
@@ -1172,7 +1190,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      */
     public boolean isBoundByTupleIds(List<TupleId> tids) {
         for (Expr child: children) {
-            if (!child.isBoundByTupleIds(tids)) return false;
+            if (!child.isBoundByTupleIds(tids)) {
+                return false;
+            }
         }
         return true;
     }
@@ -1241,7 +1261,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      * FunctionCallExpr.isConstant()).
      */
     public final boolean isConstant() {
-        if (isAnalyzed) return isConstant_;
+        if (isAnalyzed) {
+            return isConstant;
+        }
         return isConstantImpl();
     }
 
@@ -1250,7 +1272,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      */
     protected boolean isConstantImpl() {
         for (Expr expr : children) {
-            if (!expr.isConstant()) return false;
+            if (!expr.isConstant()) {
+                return false;
+            }
         }
         return true;
     }
@@ -1518,7 +1542,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      */
     public SlotRef unwrapSlotRef(boolean implicitOnly) {
         Expr unwrappedExpr = unwrapExpr(implicitOnly);
-        if (unwrappedExpr instanceof SlotRef) return (SlotRef) unwrappedExpr;
+        if (unwrappedExpr instanceof SlotRef) {
+            return (SlotRef) unwrappedExpr;
+        }
         return null;
     }
 
@@ -1870,7 +1896,9 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
 
     public static Expr getFirstBoundChild(Expr expr, List<TupleId> tids) {
         for (Expr child: expr.getChildren()) {
-            if (child.isBoundByTupleIds(tids)) return child;
+            if (child.isBoundByTupleIds(tids)) {
+                return child;
+            }
         }
         return null;
     }
@@ -1879,10 +1907,16 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      * Returns true if expr contains specify function, otherwise false.
      */
     public boolean isContainsFunction(String functionName) {
-        if (fn == null) return false;
-        if (fn.functionName().equalsIgnoreCase(functionName))  return true;
+        if (fn == null) {
+            return false;
+        }
+        if (fn.functionName().equalsIgnoreCase(functionName))  {
+            return true;
+        }
         for (Expr child: children) {
-            if (child.isContainsFunction(functionName)) return true;
+            if (child.isContainsFunction(functionName)) {
+                return true;
+            }
         }
         return false;
     }
@@ -1891,16 +1925,22 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
      * Returns true if expr contains specify className, otherwise false.
      */
     public boolean isContainsClass(String className) {
-        if (this.getClass().getName().equalsIgnoreCase(className)) return true;
+        if (this.getClass().getName().equalsIgnoreCase(className)) {
+            return true;
+        }
         for (Expr child: children) {
-            if (child.isContainsClass(className)) return true;
+            if (child.isContainsClass(className)) {
+                return true;
+            }
         }
         return false;
     }
 
     protected boolean hasNullableChild() {
         for (Expr expr : children) {
-            if (expr.isNullable()) return true;
+            if (expr.isNullable()) {
+                return true;
+            }
         }
         return false;
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprId.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprId.java
index dc8af2cd53..8507fd26a5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprId.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprId.java
@@ -37,9 +37,9 @@ public class ExprId extends Id<ExprId> {
     public static IdGenerator<ExprId> createGenerator() {
         return new IdGenerator<ExprId>() {
             @Override
-            public ExprId getNextId() { return new ExprId(nextId_++); }
+            public ExprId getNextId() { return new ExprId(nextId++); }
             @Override
-            public ExprId getMaxId() { return new ExprId(nextId_ - 1); }
+            public ExprId getMaxId() { return new ExprId(nextId - 1); }
         };
     }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprSubstitutionMap.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprSubstitutionMap.java
index 20eccffb4f..b7d44fe501 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprSubstitutionMap.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprSubstitutionMap.java
@@ -39,9 +39,9 @@ import java.util.List;
 public final class ExprSubstitutionMap {
     private final static Logger LOG = LoggerFactory.getLogger(ExprSubstitutionMap.class);
 
-    private boolean checkAnalyzed_ = true;
-    private List<Expr> lhs_; // left-hand side
-    private List<Expr> rhs_; // right-hand side
+    private boolean checkAnalyzed = true;
+    private List<Expr> lhs; // left-hand side
+    private List<Expr> rhs; // right-hand side
 
     public ExprSubstitutionMap() {
         this(Lists.<Expr>newArrayList(), Lists.<Expr>newArrayList());
@@ -50,12 +50,12 @@ public final class ExprSubstitutionMap {
     // Only used to convert show statement to select statement
     public ExprSubstitutionMap(boolean checkAnalyzed) {
         this(Lists.<Expr>newArrayList(), Lists.<Expr>newArrayList());
-        this.checkAnalyzed_ = checkAnalyzed;
+        this.checkAnalyzed = checkAnalyzed;
     }
 
     public ExprSubstitutionMap(List<Expr> lhs, List<Expr> rhs) {
-        lhs_ = lhs;
-        rhs_ = rhs;
+        this.lhs = lhs;
+        this.rhs = rhs;
     }
 
     /**
@@ -63,18 +63,20 @@ public final class ExprSubstitutionMap {
      * across query blocks. It is not required that the lhsExpr is analyzed.
      */
     public void put(Expr lhsExpr, Expr rhsExpr) {
-        Preconditions.checkState(!checkAnalyzed_ || rhsExpr.isAnalyzed(),
+        Preconditions.checkState(!checkAnalyzed || rhsExpr.isAnalyzed(),
                 "Rhs expr must be analyzed.");
-        lhs_.add(lhsExpr);
-        rhs_.add(rhsExpr);
+        lhs.add(lhsExpr);
+        rhs.add(rhsExpr);
     }
 
     /**
      * Returns the expr mapped to lhsExpr or null if no mapping to lhsExpr exists.
      */
     public Expr get(Expr lhsExpr) {
-        for (int i = 0; i < lhs_.size(); ++i) {
-            if (lhsExpr.equals(lhs_.get(i))) return rhs_.get(i);
+        for (int i = 0; i < lhs.size(); ++i) {
+            if (lhsExpr.equals(lhs.get(i))) {
+                return rhs.get(i);
+            }
         }
         return null;
     }
@@ -83,7 +85,7 @@ public final class ExprSubstitutionMap {
      * Returns true if the smap contains a mapping for lhsExpr.
      */
     public boolean containsMappingFor(Expr lhsExpr) {
-        return lhs_.contains(lhsExpr);
+        return lhs.contains(lhsExpr);
     }
 
     /**
@@ -93,17 +95,23 @@ public final class ExprSubstitutionMap {
      */
     public static ExprSubstitutionMap compose(ExprSubstitutionMap f, ExprSubstitutionMap g,
                                               Analyzer analyzer) {
-        if (f == null && g == null) return new ExprSubstitutionMap();
-        if (f == null) return g;
-        if (g == null) return f;
+        if (f == null && g == null) {
+            return new ExprSubstitutionMap();
+        }
+        if (f == null) {
+            return g;
+        }
+        if (g == null) {
+            return f;
+        }
         ExprSubstitutionMap result = new ExprSubstitutionMap();
         // f's substitution targets need to be substituted via g
-        result.lhs_ = Expr.cloneList(f.lhs_);
-        result.rhs_ = Expr.substituteList(f.rhs_, g, analyzer, false);
+        result.lhs = Expr.cloneList(f.lhs);
+        result.rhs = Expr.substituteList(f.rhs, g, analyzer, false);
 
         // substitution maps are cumulative: the combined map contains all
         // substitutions from f and g.
-        for (int i = 0; i < g.lhs_.size(); i++) {
+        for (int i = 0; i < g.lhs.size(); i++) {
             // If f contains expr1->fn(expr2) and g contains expr2->expr3,
             // then result must contain expr1->fn(expr3).
             // The check before adding to result.lhs is to ensure that cases
@@ -112,9 +120,9 @@ public final class ExprSubstitutionMap {
             // and g: count(*) -> slotref
             // result.lhs must only have: count(*) -> zeroifnull(slotref) from f above,
             // and not count(*) -> slotref from g as well.
-            if (!result.lhs_.contains(g.lhs_.get(i))) {
-                result.lhs_.add(g.lhs_.get(i).clone());
-                result.rhs_.add(g.rhs_.get(i).clone());
+            if (!result.lhs.contains(g.lhs.get(i))) {
+                result.lhs.add(g.lhs.get(i).clone());
+                result.rhs.add(g.rhs.get(i).clone());
             }
         }
 
@@ -127,33 +135,39 @@ public final class ExprSubstitutionMap {
      */
     public static ExprSubstitutionMap combine(ExprSubstitutionMap f,
                                               ExprSubstitutionMap g) {
-        if (f == null && g == null) return new ExprSubstitutionMap();
-        if (f == null) return g;
-        if (g == null) return f;
+        if (f == null && g == null) {
+            return new ExprSubstitutionMap();
+        }
+        if (f == null) {
+            return g;
+        }
+        if (g == null) {
+            return f;
+        }
         ExprSubstitutionMap result = new ExprSubstitutionMap();
-        result.lhs_ = Lists.newArrayList(f.lhs_);
-        result.lhs_.addAll(g.lhs_);
-        result.rhs_ = Lists.newArrayList(f.rhs_);
-        result.rhs_.addAll(g.rhs_);
+        result.lhs = Lists.newArrayList(f.lhs);
+        result.lhs.addAll(g.lhs);
+        result.rhs = Lists.newArrayList(f.rhs);
+        result.rhs.addAll(g.rhs);
         result.verify();
         return result;
     }
 
     public void substituteLhs(ExprSubstitutionMap lhsSmap, Analyzer analyzer) {
-        lhs_ = Expr.substituteList(lhs_, lhsSmap, analyzer, false);
+        lhs = Expr.substituteList(lhs, lhsSmap, analyzer, false);
     }
 
-    public List<Expr> getLhs() { return lhs_; }
-    public List<Expr> getRhs() { return rhs_; }
+    public List<Expr> getLhs() { return lhs; }
+    public List<Expr> getRhs() { return rhs; }
 
-    public int size() { return lhs_.size(); }
+    public int size() { return lhs.size(); }
 
     public String debugString() {
-        Preconditions.checkState(lhs_.size() == rhs_.size());
+        Preconditions.checkState(lhs.size() == rhs.size());
         List<String> output = Lists.newArrayList();
-        for (int i = 0; i < lhs_.size(); ++i) {
-            output.add(lhs_.get(i).toSql() + ":" + rhs_.get(i).toSql());
-            output.add("(" + lhs_.get(i).debugString() + ":" + rhs_.get(i).debugString() + ")");
+        for (int i = 0; i < lhs.size(); ++i) {
+            output.add(lhs.get(i).toSql() + ":" + rhs.get(i).toSql());
+            output.add("(" + lhs.get(i).debugString() + ":" + rhs.get(i).debugString() + ")");
         }
         return "smap(" + Joiner.on(" ").join(output) + ")";
     }
@@ -163,9 +177,9 @@ public final class ExprSubstitutionMap {
      * and that all rhs exprs are analyzed.
      */
     private void verify() {
-        for (int i = 0; i < lhs_.size(); ++i) {
-            for (int j = i + 1; j < lhs_.size(); ++j) {
-                if (lhs_.get(i).equals(lhs_.get(j))) {
+        for (int i = 0; i < lhs.size(); ++i) {
+            for (int j = i + 1; j < lhs.size(); ++j) {
+                if (lhs.get(i).equals(lhs.get(j))) {
                     if (LOG.isTraceEnabled()) {
                         LOG.trace("verify: smap=" + this.debugString());
                     }
@@ -173,17 +187,17 @@ public final class ExprSubstitutionMap {
                     // Preconditions.checkState(false);
                 }
             }
-            Preconditions.checkState(!checkAnalyzed_ || rhs_.get(i).isAnalyzed());
+            Preconditions.checkState(!checkAnalyzed || rhs.get(i).isAnalyzed());
         }
     }
 
     public void clear() {
-        lhs_.clear();
-        rhs_.clear();
+        lhs.clear();
+        rhs.clear();
     }
 
     @Override
     public ExprSubstitutionMap clone() {
-        return new ExprSubstitutionMap(Expr.cloneList(lhs_), Expr.cloneList(rhs_));
+        return new ExprSubstitutionMap(Expr.cloneList(lhs), Expr.cloneList(rhs));
     }
 }
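
The comments in the compose() hunk above describe its cumulative semantics: f's substitution targets are first rewritten through g, and g's own entries are then appended only when f does not already map the same left-hand side. Below is a minimal, self-contained sketch of that rule, using plain strings in place of Doris Expr trees and substring replacement in place of subtree substitution; ToySmap and its methods are illustrative names only, not Doris APIs.

    import java.util.ArrayList;
    import java.util.List;

    // Toy stand-in for ExprSubstitutionMap: parallel lhs/rhs lists of strings,
    // with substring replacement standing in for Expr subtree substitution.
    final class ToySmap {
        final List<String> lhs = new ArrayList<>();
        final List<String> rhs = new ArrayList<>();

        void put(String l, String r) {
            lhs.add(l);
            rhs.add(r);
        }

        // Apply every mapping to expr; identity when nothing matches.
        String substitute(String expr) {
            String out = expr;
            for (int i = 0; i < lhs.size(); i++) {
                out = out.replace(lhs.get(i), rhs.get(i));
            }
            return out;
        }

        // compose(f, g): f's targets are rewritten through g first, then g's own
        // entries are appended unless f already maps the same left-hand side.
        static ToySmap compose(ToySmap f, ToySmap g) {
            ToySmap result = new ToySmap();
            for (int i = 0; i < f.lhs.size(); i++) {
                result.put(f.lhs.get(i), g.substitute(f.rhs.get(i)));
            }
            for (int i = 0; i < g.lhs.size(); i++) {
                if (!result.lhs.contains(g.lhs.get(i))) {
                    result.put(g.lhs.get(i), g.rhs.get(i));
                }
            }
            return result;
        }

        public static void main(String[] args) {
            ToySmap f = new ToySmap();
            f.put("count(*)", "zeroifnull(count(*))");
            ToySmap g = new ToySmap();
            g.put("count(*)", "slotref");
            ToySmap composed = compose(f, g);
            for (int i = 0; i < composed.lhs.size(); i++) {
                // Prints: count(*) -> zeroifnull(slotref)
                System.out.println(composed.lhs.get(i) + " -> " + composed.rhs.get(i));
            }
        }
    }

Running the sketch reproduces the scenario from the comment block: the combined map keeps count(*) -> zeroifnull(slotref) from f (rewritten through g) and does not add count(*) -> slotref from g a second time.
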
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExpressionFunctions.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExpressionFunctions.java
index 2557ad89a8..7789202e88 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExpressionFunctions.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExpressionFunctions.java
@@ -288,10 +288,12 @@ public enum ExpressionFunctions {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o)
+            if (this == o) {
                 return true;
-            if (o == null || getClass() != o.getClass())
+            }
+            if (o == null || getClass() != o.getClass()) {
                 return false;
+            }
             FEFunctionSignature signature = (FEFunctionSignature) o;
             return Objects.equals(name, signature.name) && Arrays.equals(argTypes, signature.argTypes)
                     && Objects.equals(returnType, signature.returnType);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/FromClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/FromClause.java
index 8c1ab2c0ee..4906de085a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/FromClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/FromClause.java
@@ -46,27 +46,27 @@ import java.util.List;
  */
 public class FromClause implements ParseNode, Iterable<TableRef> {
 
-    private final ArrayList<TableRef> tableRefs_;
+    private final ArrayList<TableRef> tablerefs;
 
-    private boolean analyzed_ = false;
+    private boolean analyzed = false;
     private boolean needToSql = false;
 
     public FromClause(List<TableRef> tableRefs) {
-        tableRefs_ = Lists.newArrayList(tableRefs);
+        tablerefs = Lists.newArrayList(tableRefs);
         // Set left table refs to ensure correct toSql() before analysis.
-        for (int i = 1; i < tableRefs_.size(); ++i) {
-            tableRefs_.get(i).setLeftTblRef(tableRefs_.get(i - 1));
+        for (int i = 1; i < tablerefs.size(); ++i) {
+            tablerefs.get(i).setLeftTblRef(tablerefs.get(i - 1));
         }
     }
 
-    public FromClause() { tableRefs_ = Lists.newArrayList(); }
-    public List<TableRef> getTableRefs() { return tableRefs_; }
+    public FromClause() { tablerefs = Lists.newArrayList(); }
+    public List<TableRef> getTableRefs() { return tablerefs; }
     public void setNeedToSql(boolean needToSql) {
         this.needToSql = needToSql;
     }
 
     private void checkFromHiveTable(Analyzer analyzer) throws AnalysisException {
-        for (TableRef tblRef : tableRefs_) {
+        for (TableRef tblRef : tablerefs) {
             if (!(tblRef instanceof BaseTableRef)) {
                 continue;
             }
@@ -99,7 +99,7 @@ public class FromClause implements ParseNode, Iterable<TableRef> {
      * because the table t1 in the on clause cannot be recognized.
      */
     private void sortTableRefKeepSequenceOfOnClause() {
-        Collections.sort(this.tableRefs_, new Comparator<TableRef>() {
+        Collections.sort(this.tablerefs, new Comparator<TableRef>() {
             @Override
             public int compare(TableRef tableref1, TableRef tableref2) {
                 int i1 = 0;
@@ -117,10 +117,12 @@ public class FromClause implements ParseNode, Iterable<TableRef> {
 
     @Override
     public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
-        if (analyzed_) return;
+        if (analyzed) {
+            return;
+        }
 
-        if (tableRefs_.isEmpty()) {
-            analyzed_ = true;
+        if (tablerefs.isEmpty()) {
+            analyzed = true;
             return;
         }
 
@@ -135,11 +137,11 @@ public class FromClause implements ParseNode, Iterable<TableRef> {
 
         // Start out with table refs to establish aliases.
         TableRef leftTblRef = null;  // the one to the left of tblRef
-        for (int i = 0; i < tableRefs_.size(); ++i) {
+        for (int i = 0; i < tablerefs.size(); ++i) {
             // Resolve and replace non-InlineViewRef table refs with a BaseTableRef or ViewRef.
-            TableRef tblRef = tableRefs_.get(i);
+            TableRef tblRef = tablerefs.get(i);
             tblRef = analyzer.resolveTableRef(tblRef);
-            tableRefs_.set(i, Preconditions.checkNotNull(tblRef));
+            tablerefs.set(i, Preconditions.checkNotNull(tblRef));
             tblRef.setLeftTblRef(leftTblRef);
             if (tblRef instanceof InlineViewRef) {
                 ((InlineViewRef) tblRef).setNeedToSql(needToSql);
@@ -151,12 +153,14 @@ public class FromClause implements ParseNode, Iterable<TableRef> {
         // TODO: remove when query from hive table is supported
         checkFromHiveTable(analyzer);
 
-        analyzed_ = true;
+        analyzed = true;
     }
 
     public FromClause clone() {
         ArrayList<TableRef> clone = Lists.newArrayList();
-        for (TableRef tblRef: tableRefs_) clone.add(tblRef.clone());
+        for (TableRef tblRef: tablerefs) {
+            clone.add(tblRef.clone());
+        }
         return new FromClause(clone);
     }
 
@@ -178,16 +182,16 @@ public class FromClause implements ParseNode, Iterable<TableRef> {
             // }
             get(i).reset();
         }
-        this.analyzed_ = false;
+        this.analyzed = false;
     }
 
     @Override
     public String toSql() {
         StringBuilder builder = new StringBuilder();
-        if (!tableRefs_.isEmpty()) {
+        if (!tablerefs.isEmpty()) {
             builder.append(" FROM");
-            for (int i = 0; i < tableRefs_.size(); ++i) {
-                builder.append(" " + tableRefs_.get(i).toSql());
+            for (int i = 0; i < tablerefs.size(); ++i) {
+                builder.append(" " + tablerefs.get(i).toSql());
             }
         }
         return builder.toString();
@@ -195,23 +199,23 @@ public class FromClause implements ParseNode, Iterable<TableRef> {
 
     public String toDigest() {
         StringBuilder builder = new StringBuilder();
-        if (!tableRefs_.isEmpty()) {
+        if (!tablerefs.isEmpty()) {
             builder.append(" FROM");
-            for (int i = 0; i < tableRefs_.size(); ++i) {
-                builder.append(" " + tableRefs_.get(i).toDigest());
+            for (int i = 0; i < tablerefs.size(); ++i) {
+                builder.append(" " + tablerefs.get(i).toDigest());
             }
         }
         return builder.toString();
     }
 
-    public boolean isEmpty() { return tableRefs_.isEmpty(); }
+    public boolean isEmpty() { return tablerefs.isEmpty(); }
 
     @Override
-    public Iterator<TableRef> iterator() { return tableRefs_.iterator(); }
-    public int size() { return tableRefs_.size(); }
-    public TableRef get(int i) { return tableRefs_.get(i); }
-    public void set(int i, TableRef tableRef) { tableRefs_.set(i, tableRef); }
-    public void add(TableRef t) { tableRefs_.add(t); }
-    public void addAll(List<TableRef> t) { tableRefs_.addAll(t); }
-    public void clear() { tableRefs_.clear(); }
+    public Iterator<TableRef> iterator() { return tablerefs.iterator(); }
+    public int size() { return tablerefs.size(); }
+    public TableRef get(int i) { return tablerefs.get(i); }
+    public void set(int i, TableRef tableRef) { tablerefs.set(i, tableRef); }
+    public void add(TableRef t) { tablerefs.add(t); }
+    public void addAll(List<TableRef> t) { tablerefs.addAll(t); }
+    public void clear() { tablerefs.clear(); }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionCallExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionCallExpr.java
index 7f55d59fe4..e5f45317bd 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionCallExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionCallExpr.java
@@ -1158,7 +1158,9 @@ public class FunctionCallExpr extends Expr {
         // TODO: we can't correctly determine const-ness before analyzing 'fn_'. We should
         // rework logic so that we do not call this function on unanalyzed exprs.
         // Aggregate functions are never constant.
-        if (fn instanceof AggregateFunction || fn == null) return false;
+        if (fn instanceof AggregateFunction || fn == null) {
+            return false;
+        }
 
         final String fnName = this.fnName.getFunction();
         // Non-deterministic functions are never constant.
@@ -1166,7 +1168,9 @@ public class FunctionCallExpr extends Expr {
             return false;
         }
         // Sleep is a special function for testing.
-        if (fnName.equalsIgnoreCase("sleep")) return false;
+        if (fnName.equalsIgnoreCase("sleep")) {
+            return false;
+        }
         return super.isConstantImpl();
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionName.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionName.java
index b9aadd3265..754a9cf924 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionName.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionName.java
@@ -44,35 +44,35 @@ import java.util.Objects;
 public class FunctionName implements Writable {
     private static final Logger LOG = LogManager.getLogger(FunctionName.class);
 
-    private String db_;
-    private String fn_;
+    private String db;
+    private String fn;
 
     private FunctionName() {
     }
 
     public FunctionName(String db, String fn) {
-        db_ = db;
-        fn_ = fn.toLowerCase();
-        if (db_ != null) {
-            db_ = db_.toLowerCase();
+        this.db = db;
+        this.fn = fn.toLowerCase();
+        if (this.db != null) {
+            this.db = this.db.toLowerCase();
         }
     }
 
     public FunctionName(String fn) {
-        db_ = null;
-        fn_ = fn.toLowerCase();
+        db = null;
+        this.fn = fn.toLowerCase();
     }
 
     public FunctionName(TFunctionName thriftName) {
-        db_ = thriftName.db_name.toLowerCase();
-        fn_ = thriftName.function_name.toLowerCase();
+        db = thriftName.db_name.toLowerCase();
+        fn = thriftName.function_name.toLowerCase();
     }
 
     // Same as FunctionName but for builtins and we'll leave the case
     // as is since we aren't matching by string.
     public static FunctionName createBuiltinName(String fn) {
         FunctionName name = new FunctionName(fn);
-        name.fn_ = fn;
+        name.fn = fn;
         return name;
     }
 
@@ -86,47 +86,47 @@ public class FunctionName implements Writable {
             return false;
         }
         FunctionName o = (FunctionName) obj;
-        if ((db_ == null || o.db_ == null) && (db_ != o.db_)) {
-            if (db_ == null && o.db_ != null) {
+        if ((db == null || o.db == null) && (db != o.db)) {
+            if (db == null && o.db != null) {
                 return false;
             }
-            if (db_ != null && o.db_ == null) {
+            if (db != null && o.db == null) {
                 return false;
             }
-            if (!db_.equalsIgnoreCase(o.db_)) {
+            if (!db.equalsIgnoreCase(o.db)) {
                 return false;
             }
         }
-        return fn_.equalsIgnoreCase(o.fn_);
+        return fn.equalsIgnoreCase(o.fn);
     }
 
     public String getDb() {
-        return db_;
+        return db;
     }
 
     public void setDb(String db) {
-        db_ = db;
+        this.db = db;
     }
 
     public String getFunction() {
-        return fn_;
+        return fn;
     }
 
     public boolean isFullyQualified() {
-        return db_ != null;
+        return db != null;
     }
 
     @Override
     public String toString() {
-        if (db_ == null) {
-            return fn_;
+        if (db == null) {
+            return fn;
         }
-        return db_ + "." + fn_;
+        return db + "." + fn;
     }
 
     // used to analyze db element in function name, add cluster
     public String analyzeDb(Analyzer analyzer) throws AnalysisException {
-        String db = db_;
+        String db = this.db;
         if (db == null) {
             db = analyzer.getDefaultDb();
         } else {
@@ -139,29 +139,29 @@ public class FunctionName implements Writable {
     }
 
     public void analyze(Analyzer analyzer) throws AnalysisException {
-        if (fn_.length() == 0) {
+        if (fn.length() == 0) {
             throw new AnalysisException("Function name can not be empty.");
         }
-        for (int i = 0; i < fn_.length(); ++i) {
-            if (!isValidCharacter(fn_.charAt(i))) {
+        for (int i = 0; i < fn.length(); ++i) {
+            if (!isValidCharacter(fn.charAt(i))) {
                 throw new AnalysisException(
                   "Function names must be all alphanumeric or underscore. " +
-                    "Invalid name: " + fn_);
+                    "Invalid name: " + fn);
             }
         }
-        if (Character.isDigit(fn_.charAt(0))) {
-            throw new AnalysisException("Function cannot start with a digit: " + fn_);
+        if (Character.isDigit(fn.charAt(0))) {
+            throw new AnalysisException("Function cannot start with a digit: " + fn);
         }
-        if (db_ == null) {
-            db_ = analyzer.getDefaultDb();
-            if (Strings.isNullOrEmpty(db_)) {
+        if (db == null) {
+            db = analyzer.getDefaultDb();
+            if (Strings.isNullOrEmpty(db)) {
                 ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_DB_ERROR);
             }
         } else {
             if (Strings.isNullOrEmpty(analyzer.getClusterName())) {
                 ErrorReport.reportAnalysisException(ErrorCode.ERR_CLUSTER_NAME_NULL);
             }
-            db_ = ClusterNamespace.getFullName(analyzer.getClusterName(), db_);
+            db = ClusterNamespace.getFullName(analyzer.getClusterName(), db);
         }
 
         // If the function name is not fully qualified, it must not be the same as a builtin
@@ -177,28 +177,28 @@ public class FunctionName implements Writable {
     }
 
     public TFunctionName toThrift() {
-        TFunctionName name = new TFunctionName(fn_);
-        name.setDbName(db_);
-        name.setFunctionName(fn_);
+        TFunctionName name = new TFunctionName(fn);
+        name.setDbName(db);
+        name.setFunctionName(fn);
         return name;
     }
 
     @Override
     public void write(DataOutput out) throws IOException {
-        if (db_ != null) {
+        if (db != null) {
             out.writeBoolean(true);
-            Text.writeString(out, db_);
+            Text.writeString(out, db);
         } else {
             out.writeBoolean(false);
         }
-        Text.writeString(out, fn_);
+        Text.writeString(out, fn);
     }
 
     public void readFields(DataInput in) throws IOException {
         if (in.readBoolean()) {
-            db_ = Text.readString(in);
+            db = Text.readString(in);
         }
-        fn_ = Text.readString(in);
+        fn = Text.readString(in);
     }
 
     public static FunctionName read(DataInput in) throws IOException{
@@ -209,6 +209,6 @@ public class FunctionName implements Writable {
 
     @Override
     public int hashCode() {
-        return 31 * Objects.hashCode(db_) + Objects.hashCode(fn_);
+        return 31 * Objects.hashCode(db) + Objects.hashCode(fn);
     }
 }
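
As the FunctionName hunks above show, the constructors lowercase both the db and the function name, equals() compares them case-insensitively, and hashCode() hashes the stored strings directly; storing the values already lowercased is what keeps equals() and hashCode() consistent for names built through those constructors. A self-contained sketch of that normalization pattern follows; QualifiedName is an illustration only, not the Doris class (the real FunctionName also handles Thrift names, serialization, and cluster-qualified db names).

    import java.util.Objects;

    // Simplified illustration of case-normalized names: store lowercase so that
    // equality and hashing agree regardless of the caller's capitalization.
    final class QualifiedName {
        private final String db;   // may be null for unqualified names
        private final String fn;

        QualifiedName(String db, String fn) {
            this.db = db == null ? null : db.toLowerCase();
            this.fn = fn.toLowerCase();
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            QualifiedName other = (QualifiedName) o;
            return Objects.equals(db, other.db) && fn.equals(other.fn);
        }

        @Override
        public int hashCode() {
            return 31 * Objects.hashCode(db) + Objects.hashCode(fn);
        }

        @Override
        public String toString() {
            return db == null ? fn : db + "." + fn;
        }

        public static void main(String[] args) {
            QualifiedName a = new QualifiedName("MyDb", "MyFunc");
            QualifiedName b = new QualifiedName("mydb", "myfunc");
            System.out.println(a.equals(b));                  // true
            System.out.println(a.hashCode() == b.hashCode()); // true
            System.out.println(a);                            // mydb.myfunc
        }
    }
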
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/GroupByClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/GroupByClause.java
index 302242bd1d..d74218c69e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/GroupByClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/GroupByClause.java
@@ -50,7 +50,7 @@ public class GroupByClause implements ParseNode {
     // max num of distinct sets in grouping sets clause
     private final static int MAX_GROUPING_SETS_NUM = 64;
     // max num of distinct expressions
-    private boolean analyzed_ = false;
+    private boolean analyzed = false;
     private boolean exprGenerated = false;
     private GroupingType groupingType;
     private ArrayList<Expr> groupingExprs;
@@ -96,7 +96,7 @@ public class GroupByClause implements ParseNode {
 
     public void reset() {
         groupingExprs = new ArrayList<>();
-        analyzed_ = false;
+        analyzed = false;
         exprGenerated = false;
         if (oriGroupingExprs != null) {
             Expr.resetList(oriGroupingExprs);
@@ -169,7 +169,7 @@ public class GroupByClause implements ParseNode {
 
     @Override
     public void analyze(Analyzer analyzer) throws AnalysisException {
-        if (analyzed_) {
+        if (analyzed) {
             return;
         }
         genGroupingExprs();
@@ -206,7 +206,7 @@ public class GroupByClause implements ParseNode {
             throw new AnalysisException("Too many sets in GROUP BY clause, the max grouping sets item is "
                     + MAX_GROUPING_SETS_NUM);
         }
-        analyzed_ = true;
+        analyzed = true;
     }
 
     // check if group by clause is contain grouping set/rollup/cube
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/InPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/InPredicate.java
index 575ce07aa1..de9d7ffad7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/InPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/InPredicate.java
@@ -60,12 +60,16 @@ public class InPredicate extends Predicate {
 
     public static void initBuiltins(FunctionSet functionSet) {
         for (Type t: Type.getSupportedTypes()) {
-            if (t.isNull()) continue;
+            if (t.isNull()) {
+                continue;
+            }
             // TODO we do not support codegen for CHAR and the In predicate must be codegened
             // because it has variable number of arguments. This will force CHARs to be
             // cast up to strings; meaning that "in" comparisons will not have CHAR comparison
             // semantics.
-            if (t.getPrimitiveType() == PrimitiveType.CHAR) continue;
+            if (t.getPrimitiveType() == PrimitiveType.CHAR) {
+                continue;
+            }
 
             String typeString = Function.getUdfTypeName(t.getPrimitiveType());
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/InlineViewRef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/InlineViewRef.java
index f576f02bd8..835fd21f5e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/InlineViewRef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/InlineViewRef.java
@@ -99,20 +99,24 @@ public class InlineViewRef extends TableRef {
     public InlineViewRef(View view, TableRef origTblRef) {
         super(origTblRef.getName(), origTblRef.getExplicitAlias());
         queryStmt = view.getQueryStmt().clone();
-        if (view.isLocalView()) queryStmt.reset();
+        if (view.isLocalView()) {
+            queryStmt.reset();
+        }
         this.view = view;
         sMap = new ExprSubstitutionMap();
         baseTblSmap = new ExprSubstitutionMap();
         setJoinAttrs(origTblRef);
         explicitColLabels = view.getColLabels();
         // Set implicit aliases if no explicit one was given.
-        if (hasExplicitAlias()) return;
+        if (hasExplicitAlias()) {
+            return;
+        }
         // TODO(zc)
         // view_.getTableName().toString().toLowerCase(), view.getName().toLowerCase()
         if (view.isLocalView()) {
-            aliases_ = new String[]{view.getName()};
+            aliases = new String[]{view.getName()};
         } else {
-            aliases_ = new String[]{name.toString(), view.getName()};
+            aliases = new String[]{name.toString(), view.getName()};
         }
         if (origTblRef.getLateralViewRefs() != null) {
             lateralViewRefs = (ArrayList<LateralViewRef>) origTblRef.getLateralViewRefs().clone();
@@ -181,12 +185,12 @@ public class InlineViewRef extends TableRef {
         inlineViewAnalyzer = new Analyzer(analyzer);
 
         queryStmt.analyze(inlineViewAnalyzer);
-        correlatedTupleIds_.addAll(queryStmt.getCorrelatedTupleIds(inlineViewAnalyzer));
+        correlatedTupleIds.addAll(queryStmt.getCorrelatedTupleIds(inlineViewAnalyzer));
 
         queryStmt.getMaterializedTupleIds(materializedTupleIds);
         if (view != null && !hasExplicitAlias() && !view.isLocalView()) {
             name = analyzer.getFqTableName(name);
-            aliases_ = new String[] { name.toString(), view.getName() };
+            aliases = new String[] { name.toString(), view.getName() };
         }
         //TODO(chenhao16): fix TableName in Db.Table style
         // name.analyze(analyzer);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/IsNullPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/IsNullPredicate.java
index f6643720b8..619fe484e0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/IsNullPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/IsNullPredicate.java
@@ -40,7 +40,9 @@ public class IsNullPredicate extends Predicate {
 
     public static void initBuiltins(FunctionSet functionSet) {
         for (Type t: Type.getSupportedTypes()) {
-            if (t.isNull()) continue;
+            if (t.isNull()) {
+                continue;
+            }
             String isNullSymbol;
             if (t == Type.BOOLEAN) {
                 isNullSymbol = "_ZN5doris15IsNullPredicate7is_nullIN9doris_udf10BooleanValE" +
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/LimitElement.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/LimitElement.java
index a7d4a5120f..8ddeb452ec 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/LimitElement.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/LimitElement.java
@@ -109,7 +109,9 @@ public class LimitElement {
     }
 
     public void analyze(Analyzer analyzer) {
-        if (limit == 0) analyzer.setHasEmptyResultSet();
+        if (limit == 0) {
+            analyzer.setHasEmptyResultSet();
+        }
     }
 
     public void reset() {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/OutFileClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/OutFileClause.java
index 9f3a8973f0..9aa8253dab 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/OutFileClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/OutFileClause.java
@@ -17,7 +17,7 @@
 
 package org.apache.doris.analysis;
 
-import org.apache.doris.backup.HDFSStorage;
+import org.apache.doris.backup.HdfsStorage;
 import org.apache.doris.backup.S3Storage;
 import org.apache.doris.catalog.PrimitiveType;
 import org.apache.doris.catalog.Type;
@@ -319,6 +319,7 @@ public class OutFileClause {
                     if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().isReturnObjectDataAsBinary()) {
                         column.add("byte_array");
                     }
+                    break;
                 default:
                     throw new AnalysisException("currently parquet do not support column type: " + expr.getType().getPrimitiveType());
             }
@@ -438,7 +439,7 @@ public class OutFileClause {
         if (storageType == StorageBackend.StorageType.S3) {
             S3Storage.checkS3(new CaseInsensitiveMap(brokerProps));
         } else if (storageType == StorageBackend.StorageType.HDFS) {
-            HDFSStorage.checkHDFS(new CaseInsensitiveMap(brokerProps));
+            HdfsStorage.checkHDFS(new CaseInsensitiveMap(brokerProps));
         }
 
         brokerDesc = new BrokerDesc(brokerName, storageType, brokerProps);
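
The first OutFileClause hunk above is a behavior fix rather than a style change: without the added break, the preceding case falls straight through into the default branch and throws even for a column type the case just handled. A minimal sketch of that fall-through behavior is shown below; the enum values and messages are illustrative, not the Doris code.

    public class SwitchFallThroughDemo {
        enum ColType { BITMAP, INT, UNSUPPORTED }

        // Buggy shape: the BITMAP case has no break, so control continues
        // into default and throws even though BITMAP is supported.
        static String mapBuggy(ColType t) {
            switch (t) {
                case INT:
                    return "int32";
                case BITMAP:
                    System.out.println("mapping bitmap to byte_array");
                    // missing break here: intentional fall-through into default
                default:
                    throw new IllegalArgumentException("unsupported type: " + t);
            }
        }

        // Fixed shape: break stops the fall-through before default.
        static String mapFixed(ColType t) {
            String out;
            switch (t) {
                case INT:
                    out = "int32";
                    break;
                case BITMAP:
                    out = "byte_array";
                    break;                      // the added break
                default:
                    throw new IllegalArgumentException("unsupported type: " + t);
            }
            return out;
        }

        public static void main(String[] args) {
            System.out.println(mapFixed(ColType.BITMAP));   // byte_array
            try {
                mapBuggy(ColType.BITMAP);                   // throws due to fall-through
            } catch (IllegalArgumentException e) {
                System.out.println("buggy version threw: " + e.getMessage());
            }
        }
    }
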
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/QueryStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/QueryStmt.java
index c6fc843b7c..7e767a994f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/QueryStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/QueryStmt.java
@@ -56,7 +56,7 @@ public abstract class QueryStmt extends StatementBase {
     /////////////////////////////////////////
     // BEGIN: Members that need to be reset()
 
-    protected WithClause withClause_;
+    protected WithClause withClause;
 
     protected ArrayList<OrderByElement> orderByElements;
     // Limit element could not be null, the default limit element is NO_LIMIT
@@ -173,10 +173,14 @@ public abstract class QueryStmt extends StatementBase {
 
     @Override
     public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
-        if (isAnalyzed()) return;
+        if (isAnalyzed()) {
+            return;
+        }
         super.analyze(analyzer);
         analyzeLimit(analyzer);
-        if (hasWithClause()) withClause_.analyze(analyzer);
+        if (hasWithClause()) {
+            withClause.analyze(analyzer);
+        }
     }
 
     private void analyzeLimit(Analyzer analyzer) throws AnalysisException {
@@ -218,7 +222,9 @@ public abstract class QueryStmt extends StatementBase {
         List<TableRef> tblRefs = Lists.newArrayList();
         collectTableRefs(tblRefs);
         for (TableRef tblRef : tblRefs) {
-            if (absoluteRef == null && !tblRef.isRelative()) absoluteRef = tblRef;
+            if (absoluteRef == null && !tblRef.isRelative()) {
+                absoluteRef = tblRef;
+            }
             /*if (tblRef.isCorrelated()) {
              *
              *   // Check if the correlated table ref is rooted at a tuple descriptor from within
@@ -385,7 +391,9 @@ public abstract class QueryStmt extends StatementBase {
      */
     protected Expr getFirstAmbiguousAlias(List<Expr> exprs) {
         for (Expr exp : exprs) {
-            if (ambiguousAliasList.contains(exp)) return exp;
+            if (ambiguousAliasList.contains(exp)) {
+                return exp;
+            }
         }
         return null;
     }
@@ -425,9 +433,13 @@ public abstract class QueryStmt extends StatementBase {
     // select list items.  Return null if not an ordinal expression.
     private Expr trySubstituteOrdinal(Expr expr, String errorPrefix,
                                       Analyzer analyzer) throws AnalysisException {
-        if (!(expr instanceof IntLiteral)) return null;
+        if (!(expr instanceof IntLiteral)) {
+            return null;
+        }
         expr.analyze(analyzer);
-        if (!expr.getType().isIntegerType()) return null;
+        if (!expr.getType().isIntegerType()) {
+            return null;
+        }
         long pos = ((IntLiteral) expr).getLongValue();
         if (pos < 1) {
             throw new AnalysisException(
@@ -444,14 +456,14 @@ public abstract class QueryStmt extends StatementBase {
     }
 
     public void getWithClauseTables(Analyzer analyzer, Map<Long, Table> tableMap, Set<String> parentViewNameSet) throws AnalysisException {
-        if (withClause_ != null) {
-            withClause_.getTables(analyzer, tableMap, parentViewNameSet);
+        if (withClause != null) {
+            withClause.getTables(analyzer, tableMap, parentViewNameSet);
         }
     }
 
     public void getWithClauseTableRefs(Analyzer analyzer, List<TableRef> tblRefs, Set<String> parentViewNameSet) {
-        if (withClause_ != null) {
-            withClause_.getTableRefs(analyzer, tblRefs, parentViewNameSet);
+        if (withClause != null) {
+            withClause.getTableRefs(analyzer, tblRefs, parentViewNameSet);
         }
     }
 
@@ -560,15 +572,15 @@ public abstract class QueryStmt extends StatementBase {
     }
 
     public void setWithClause(WithClause withClause) {
-        this.withClause_ = withClause;
+        this.withClause = withClause;
     }
 
     public boolean hasWithClause() {
-        return withClause_ != null;
+        return withClause != null;
     }
 
     public WithClause getWithClause() {
-        return withClause_;
+        return withClause;
     }
 
     public boolean hasOrderByClause() {
@@ -689,15 +701,19 @@ public abstract class QueryStmt extends StatementBase {
     }
 
     public ArrayList<OrderByElement> cloneOrderByElements() {
-        if (orderByElements == null) return null;
+        if (orderByElements == null) {
+            return null;
+        }
         ArrayList<OrderByElement> result =
                 Lists.newArrayListWithCapacity(orderByElements.size());
-        for (OrderByElement o : orderByElements) result.add(o.clone());
+        for (OrderByElement o : orderByElements) {
+            result.add(o.clone());
+        }
         return result;
     }
 
     public WithClause cloneWithClause() {
-        return withClause_ != null ? withClause_.clone() : null;
+        return withClause != null ? withClause.clone() : null;
     }
 
     public OutFileClause cloneOutfileCluse() {
@@ -713,7 +729,7 @@ public abstract class QueryStmt extends StatementBase {
      */
     protected QueryStmt(QueryStmt other) {
         super(other);
-        withClause_ = other.cloneWithClause();
+        withClause = other.cloneWithClause();
         outFileClause = other.cloneOutfileCluse();
         orderByElements = other.cloneOrderByElements();
         limitElement = other.limitElement.clone();
@@ -730,8 +746,9 @@ public abstract class QueryStmt extends StatementBase {
     public void reset() {
         super.reset();
         if (orderByElements != null) {
-            for (OrderByElement o : orderByElements)
+            for (OrderByElement o : orderByElements) {
                 o.getExpr().reset();
+            }
         }
         limitElement.reset();
         resultExprs.clear();
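
trySubstituteOrdinal in the QueryStmt hunks above implements the usual SQL rule that a bare integer literal in ORDER BY (or GROUP BY) names a 1-based position in the select list rather than a constant value. A small sketch of that resolution rule over plain strings follows; the method and class names are illustrative, not Doris APIs, and the real code performs the check on analyzed Expr trees.

    import java.util.List;

    public class OrdinalResolution {
        // Resolve a 1-based ordinal against the select list; anything that is
        // not an in-range positive integer is returned unchanged (not an ordinal).
        static String resolveOrderByItem(String item, List<String> selectItems) {
            int pos;
            try {
                pos = Integer.parseInt(item.trim());
            } catch (NumberFormatException e) {
                return item;                      // not an integer literal
            }
            if (pos < 1 || pos > selectItems.size()) {
                throw new IllegalArgumentException(
                        "ORDER BY: ordinal out of range: " + pos);
            }
            return selectItems.get(pos - 1);      // ordinals are 1-based
        }

        public static void main(String[] args) {
            List<String> selectItems = List.of("k1", "sum(v1)", "count(*)");
            System.out.println(resolveOrderByItem("2", selectItems));   // sum(v1)
            System.out.println(resolveOrderByItem("k1", selectItems));  // k1 (unchanged)
        }
    }
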
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectStmt.java
index c2f3d32c35..d861d63d2b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectStmt.java
@@ -79,7 +79,7 @@ public class SelectStmt extends QueryStmt {
 
     protected SelectList selectList;
     private final ArrayList<String> colLabels; // lower case column labels
-    protected final FromClause fromClause_;
+    protected final FromClause fromClause;
     protected GroupByClause groupByClause;
     private List<Expr> originalExpr;
     //
@@ -111,7 +111,7 @@ public class SelectStmt extends QueryStmt {
 
     // SQL string of this SelectStmt before inline-view expression substitution.
     // Set in analyze().
-    protected String sqlString_;
+    protected String sqlString;
 
     // Table alias generator used during query rewriting.
     private TableAliasGenerator tableAliasGenerator = null;
@@ -123,7 +123,7 @@ public class SelectStmt extends QueryStmt {
         super(orderByElement, limitElement);
         this.valueList = valueList;
         this.selectList = new SelectList();
-        this.fromClause_ = new FromClause();
+        this.fromClause = new FromClause();
         this.colLabels = Lists.newArrayList();
     }
 
@@ -139,9 +139,9 @@ public class SelectStmt extends QueryStmt {
         this.selectList = selectList;
         this.originSelectList = selectList.clone();
         if (fromClause == null) {
-            fromClause_ = new FromClause();
+            this.fromClause = new FromClause();
         } else {
-            fromClause_ = fromClause;
+            this.fromClause = fromClause;
         }
         this.whereClause = wherePredicate;
         this.groupByClause = groupByClause;
@@ -158,7 +158,7 @@ public class SelectStmt extends QueryStmt {
         super(other);
         this.id = other.id;
         selectList = other.selectList.clone();
-        fromClause_ = other.fromClause_.clone();
+        fromClause = other.fromClause.clone();
         whereClause = (other.whereClause != null) ? other.whereClause.clone() : null;
         groupByClause = (other.groupByClause != null) ? other.groupByClause.clone() : null;
         havingClause = (other.havingClause != null) ? other.havingClause.clone() : null;
@@ -166,7 +166,7 @@ public class SelectStmt extends QueryStmt {
         colLabels = Lists.newArrayList(other.colLabels);
         aggInfo = (other.aggInfo != null) ? other.aggInfo.clone() : null;
         analyticInfo = (other.analyticInfo != null) ? other.analyticInfo.clone() : null;
-        sqlString_ = (other.sqlString_ != null) ? other.sqlString_ : null;
+        sqlString = (other.sqlString != null) ? other.sqlString : null;
         baseTblSmap = other.baseTblSmap.clone();
         groupingInfo = null;
     }
@@ -176,7 +176,7 @@ public class SelectStmt extends QueryStmt {
         super.reset();
         selectList.reset();
         colLabels.clear();
-        fromClause_.reset();
+        fromClause.reset();
         if (whereClause != null) {
             whereClause.reset();
         }
@@ -237,7 +237,7 @@ public class SelectStmt extends QueryStmt {
     }
 
     public List<TableRef> getTableRefs() {
-        return fromClause_.getTableRefs();
+        return fromClause.getTableRefs();
     }
 
     public Expr getWhereClause() {
@@ -293,7 +293,7 @@ public class SelectStmt extends QueryStmt {
     @Override
     public void getTables(Analyzer analyzer, Map<Long, Table> tableMap, Set<String> parentViewNameSet) throws AnalysisException {
         getWithClauseTables(analyzer, tableMap, parentViewNameSet);
-        for (TableRef tblRef : fromClause_) {
+        for (TableRef tblRef : fromClause) {
             if (tblRef instanceof InlineViewRef) {
                 // Inline view reference
                 QueryStmt inlineStmt = ((InlineViewRef) tblRef).getViewStmt();
@@ -335,7 +335,7 @@ public class SelectStmt extends QueryStmt {
     @Override
     public void getTableRefs(Analyzer analyzer, List<TableRef> tblRefs, Set<String> parentViewNameSet) {
         getWithClauseTableRefs(analyzer, tblRefs, parentViewNameSet);
-        for (TableRef tblRef : fromClause_) {
+        for (TableRef tblRef : fromClause) {
             try {
                 TableRef tmpTblRef = analyzer.resolveTableRef(tblRef);
                 if (tmpTblRef instanceof InlineViewRef) {
@@ -360,8 +360,8 @@ public class SelectStmt extends QueryStmt {
             return true;
         }
 
-        if (withClause_ != null) {
-            List<View> views = withClause_.getViews();
+        if (withClause != null) {
+            List<View> views = withClause.getViews();
             for (View view : views) {
                 if (view.getName().equals(tblName)) {
                     return true;
@@ -398,8 +398,8 @@ public class SelectStmt extends QueryStmt {
             return;
         }
         super.analyze(analyzer);
-        fromClause_.setNeedToSql(needToSql);
-        fromClause_.analyze(analyzer);
+        fromClause.setNeedToSql(needToSql);
+        fromClause.analyze(analyzer);
 
         // Generate !empty() predicates to filter out empty collections.
         // Skip this step when analyzing a WITH-clause because CollectionTableRefs
@@ -483,7 +483,7 @@ public class SelectStmt extends QueryStmt {
         // analyze selectListExprs
         Expr.analyze(resultExprs, analyzer);
         if (TreeNode.contains(resultExprs, AnalyticExpr.class)) {
-            if (fromClause_.isEmpty()) {
+            if (fromClause.isEmpty()) {
                 throw new AnalysisException("Analytic expressions require FROM clause.");
             }
 
@@ -539,7 +539,7 @@ public class SelectStmt extends QueryStmt {
         }
 
         if (needToSql) {
-            sqlString_ = toSql();
+            sqlString = toSql();
         }
         if (analyzer.enableStarJoinReorder()) {
             LOG.debug("use old reorder logical in select stmt");
@@ -565,7 +565,7 @@ public class SelectStmt extends QueryStmt {
     public List<TupleId> getTableRefIds() {
         List<TupleId> result = Lists.newArrayList();
 
-        for (TableRef ref : fromClause_) {
+        for (TableRef ref : fromClause) {
             result.add(ref.getId());
         }
 
@@ -575,7 +575,7 @@ public class SelectStmt extends QueryStmt {
     public List<TupleId> getTableRefIdsWithoutInlineView() {
         List<TupleId> result = Lists.newArrayList();
 
-        for (TableRef ref : fromClause_) {
+        for (TableRef ref : fromClause) {
             if (ref instanceof InlineViewRef) {
                 continue;
             }
@@ -586,7 +586,7 @@ public class SelectStmt extends QueryStmt {
     }
 
     public boolean hasInlineView() {
-        for (TableRef ref : fromClause_) {
+        for (TableRef ref : fromClause) {
             if (ref instanceof InlineViewRef) {
                 return true;
             }
@@ -724,7 +724,7 @@ public class SelectStmt extends QueryStmt {
         }
 
         // materialized all lateral view column and origin column
-        for (TableRef tableRef : fromClause_.getTableRefs()) {
+        for (TableRef tableRef : fromClause.getTableRefs()) {
             if (tableRef.lateralViewRefs != null) {
                 for (LateralViewRef lateralViewRef : tableRef.lateralViewRefs) {
                     lateralViewRef.materializeRequiredSlots(baseTblSmap, analyzer);
@@ -737,7 +737,7 @@ public class SelectStmt extends QueryStmt {
         List<Pair<TableRef, Long>> candidates = Lists.newArrayList();
 
         // New pair of table ref and row count
-        for (TableRef tblRef : fromClause_) {
+        for (TableRef tblRef : fromClause) {
             if (tblRef.getJoinOp() != JoinOperator.INNER_JOIN || tblRef.hasJoinHints()) {
                 // Unsupported reorder outer join
                 return;
@@ -772,9 +772,9 @@ public class SelectStmt extends QueryStmt {
         }
 
         // can not get AST only with equal join, MayBe cross join can help
-        fromClause_.clear();
+        fromClause.clear();
         for (Pair<TableRef, Long> candidate : candidates) {
-            fromClause_.add(candidate.first);
+            fromClause.add(candidate.first);
         }
     }
 
@@ -785,14 +785,14 @@ public class SelectStmt extends QueryStmt {
         Map<TupleId, TableRef> tableRefMap = Maps.newHashMap();
 
         // set Map and push list
-        for (TableRef tblRef : fromClause_) {
+        for (TableRef tblRef : fromClause) {
             tableRefMap.put(tblRef.getId(), tblRef);
             tmpRefList.add(tblRef);
         }
         // clear tableRefList
-        fromClause_.clear();
+        fromClause.clear();
         // mark first table
-        fromClause_.add(firstRef);
+        fromClause.add(firstRef);
         tableRefMap.remove(firstRef.getId());
 
         // reserve TupleId has been added successfully
@@ -800,13 +800,13 @@ public class SelectStmt extends QueryStmt {
         validTupleId.add(firstRef.getId());
         // find table
         int i = 0;
-        while (i < fromClause_.size()) {
-            TableRef tblRef = fromClause_.get(i);
+        while (i < fromClause.size()) {
+            TableRef tblRef = fromClause.get(i);
             // get all equal
             List<Expr> eqJoinPredicates = analyzer.getEqJoinConjuncts(tblRef.getId());
-            List<TupleId> tuple_list = Lists.newArrayList();
-            Expr.getIds(eqJoinPredicates, tuple_list, null);
-            for (TupleId tid : tuple_list) {
+            List<TupleId> tupleList = Lists.newArrayList();
+            Expr.getIds(eqJoinPredicates, tupleList, null);
+            for (TupleId tid : tupleList) {
                 if (validTupleId.contains(tid)) {
                     // tid has allreday in the list of validTupleId, ignore it
                     continue;
@@ -829,7 +829,7 @@ public class SelectStmt extends QueryStmt {
                     }
 
                     if (count == 0) {
-                        fromClause_.add(candidateTableRef);
+                        fromClause.add(candidateTableRef);
                         validTupleId.add(tid);
                         tableRefMap.remove(tid);
                     }
@@ -839,8 +839,8 @@ public class SelectStmt extends QueryStmt {
         }
         // find path failed.
         if (0 != tableRefMap.size()) {
-            fromClause_.clear();
-            fromClause_.addAll(tmpRefList);
+            fromClause.clear();
+            fromClause.addAll(tmpRefList);
             return false;
         }
         return true;
@@ -852,7 +852,7 @@ public class SelectStmt extends QueryStmt {
      */
     protected void resolveInlineViewRefs(Analyzer analyzer) throws AnalysisException {
         // Gather the inline view substitution maps from the enclosed inline views
-        for (TableRef tblRef : fromClause_) {
+        for (TableRef tblRef : fromClause) {
             if (tblRef instanceof InlineViewRef) {
                 InlineViewRef inlineViewRef = (InlineViewRef) tblRef;
                 baseTblSmap = ExprSubstitutionMap.combine(baseTblSmap, inlineViewRef.getBaseTblSmap());
@@ -871,11 +871,11 @@ public class SelectStmt extends QueryStmt {
      * Expand "*" select list item.
      */
     private void expandStar(Analyzer analyzer) throws AnalysisException {
-        if (fromClause_.isEmpty()) {
+        if (fromClause.isEmpty()) {
             ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_TABLES_USED);
         }
         // expand in From clause order
-        for (TableRef tableRef : fromClause_) {
+        for (TableRef tableRef : fromClause) {
             if (analyzer.isSemiJoined(tableRef.getId())) {
                 continue;
             }
@@ -976,7 +976,7 @@ public class SelectStmt extends QueryStmt {
         }
 
         // If we're computing an aggregate, we must have a FROM clause.
-        if (fromClause_.size() == 0) {
+        if (fromClause.size() == 0) {
             throw new AnalysisException("Aggregation without a FROM clause is not allowed");
         }
 
@@ -1332,7 +1332,7 @@ public class SelectStmt extends QueryStmt {
     public void rewriteExprs(ExprRewriter rewriter) throws AnalysisException {
         Preconditions.checkState(isAnalyzed());
         rewriteSelectList(rewriter);
-        for (TableRef ref : fromClause_) {
+        for (TableRef ref : fromClause) {
             ref.rewriteExprs(rewriter, analyzer);
         }
         // Also equal exprs in the statements of subqueries.
@@ -1388,7 +1388,7 @@ public class SelectStmt extends QueryStmt {
         }
 
         // from clause
-        for (TableRef ref : fromClause_) {
+        for (TableRef ref : fromClause) {
             Preconditions.checkState(ref.isAnalyzed);
             if (ref.onClause != null) {
                 registerExprId(ref.onClause);
@@ -1499,7 +1499,7 @@ public class SelectStmt extends QueryStmt {
         }
 
         // from clause
-        for (TableRef ref : fromClause_) {
+        for (TableRef ref : fromClause) {
             if (ref.onClause != null) {
                 ref.setOnClause(rewrittenExprMap.get(ref.onClause.getId().toString()));
             }
@@ -1627,7 +1627,7 @@ public class SelectStmt extends QueryStmt {
             } catch (UserException e) {
                 throw new AnalysisException(e.getMessage());
             }
-            fromClause_.add(inlineViewRef);
+            fromClause.add(inlineViewRef);
             expr = new SlotRef(inlineViewRef.getAliasAsName(), colAlias);
         } else if (CollectionUtils.isNotEmpty(expr.getChildren())) {
             for (int i = 0; i < expr.getChildren().size(); ++i) {
@@ -1639,12 +1639,12 @@ public class SelectStmt extends QueryStmt {
 
     @Override
     public String toSql() {
-        if (sqlString_ != null) {
-            return sqlString_;
+        if (sqlString != null) {
+            return sqlString;
         }
         StringBuilder strBuilder = new StringBuilder();
-        if (withClause_ != null) {
-            strBuilder.append(withClause_.toSql());
+        if (withClause != null) {
+            strBuilder.append(withClause.toSql());
             strBuilder.append(" ");
         }
 
@@ -1668,8 +1668,8 @@ public class SelectStmt extends QueryStmt {
         }
 
         // From clause
-        if (!fromClause_.isEmpty()) {
-            strBuilder.append(fromClause_.toSql());
+        if (!fromClause.isEmpty()) {
+            strBuilder.append(fromClause.toSql());
         }
 
         // Where clause
@@ -1712,8 +1712,8 @@ public class SelectStmt extends QueryStmt {
     @Override
     public String toDigest() {
         StringBuilder strBuilder = new StringBuilder();
-        if (withClause_ != null) {
-            strBuilder.append(withClause_.toDigest());
+        if (withClause != null) {
+            strBuilder.append(withClause.toDigest());
             strBuilder.append(" ");
         }
 
@@ -1745,8 +1745,8 @@ public class SelectStmt extends QueryStmt {
         }
 
         // From clause
-        if (!fromClause_.isEmpty()) {
-            strBuilder.append(fromClause_.toDigest());
+        if (!fromClause.isEmpty()) {
+            strBuilder.append(fromClause.toDigest());
         }
 
         // Where clause
@@ -1807,7 +1807,7 @@ public class SelectStmt extends QueryStmt {
                 tupleIdList.add(aggInfo.getOutputTupleId());
             }
         } else {
-            for (TableRef tblRef : fromClause_) {
+            for (TableRef tblRef : fromClause) {
                 tupleIdList.addAll(tblRef.getMaterializedTupleIds());
             }
         }
@@ -1824,16 +1824,16 @@ public class SelectStmt extends QueryStmt {
             throws AnalysisException, UserException {
         // analyze with clause
         if (hasWithClause()) {
-            withClause_.analyze(analyzer);
+            withClause.analyze(analyzer);
         }
         // start out with table refs to establish aliases
         TableRef leftTblRef = null;  // the one to the left of tblRef
-        for (int i = 0; i < fromClause_.size(); ++i) {
+        for (int i = 0; i < fromClause.size(); ++i) {
             // Resolve and replace non-InlineViewRef table refs with a BaseTableRef or ViewRef.
-            TableRef tblRef = fromClause_.get(i);
+            TableRef tblRef = fromClause.get(i);
             tblRef = analyzer.resolveTableRef(tblRef);
             Preconditions.checkNotNull(tblRef);
-            fromClause_.set(i, tblRef);
+            fromClause.set(i, tblRef);
             tblRef.setLeftTblRef(leftTblRef);
             tblRef.analyze(analyzer);
             leftTblRef = tblRef;
@@ -1908,7 +1908,7 @@ public class SelectStmt extends QueryStmt {
             return true;
         }
         // No from clause (base tables or inline views)
-        if (fromClause_.isEmpty()) {
+        if (fromClause.isEmpty()) {
             return true;
         }
         // Aggregation with no group by and no DISTINCT
@@ -1921,7 +1921,7 @@ public class SelectStmt extends QueryStmt {
 
     @Override
     public void collectTableRefs(List<TableRef> tblRefs) {
-        for (TableRef tblRef : fromClause_) {
+        for (TableRef tblRef : fromClause) {
             if (tblRef instanceof InlineViewRef) {
                 InlineViewRef inlineViewRef = (InlineViewRef) tblRef;
                 inlineViewRef.getViewStmt().collectTableRefs(tblRefs);
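
The SelectStmt reorder hunks above sketch a greedy connectivity walk: starting from a chosen first table, the loop repeatedly appends tables that share an equi-join predicate with a table already placed, and restores the original order when some table can never be connected ("find path failed"). The standalone sketch below mirrors only that walk over a simple adjacency map; the identifiers are illustrative, not Doris classes, and the real method applies extra per-candidate checks that are not reproduced here.

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class GreedyJoinOrder {

        // Greedily order tables so that each table after the first is connected
        // by an equi-join edge to some table already placed. Returns null when
        // that fails, mirroring the "find path failed" branch above.
        static List<String> reorder(String first, Set<String> tables,
                                    Map<String, Set<String>> equiJoinEdges) {
            List<String> ordered = new ArrayList<>();
            ordered.add(first);
            Set<String> remaining = new HashSet<>(tables);
            remaining.remove(first);

            int i = 0;
            while (i < ordered.size()) {
                String current = ordered.get(i);
                for (String neighbor : equiJoinEdges.getOrDefault(current, Set.of())) {
                    if (remaining.remove(neighbor)) {
                        ordered.add(neighbor);
                    }
                }
                i++;
            }
            return remaining.isEmpty() ? ordered : null;
        }

        public static void main(String[] args) {
            Map<String, Set<String>> edges = Map.of(
                    "t1", Set.of("t2"),
                    "t2", Set.of("t1", "t3"),
                    "t3", Set.of("t2"));
            // Prints [t1, t2, t3]; a table with no equi-join edge to the others
            // would leave 'remaining' non-empty and the method would return null.
            System.out.println(reorder("t1", Set.of("t1", "t2", "t3"), edges));
        }
    }
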
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SetOperationStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SetOperationStmt.java
index 976e13eb0c..5ffac6a0b7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SetOperationStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SetOperationStmt.java
@@ -71,12 +71,12 @@ public class SetOperationStmt extends QueryStmt {
 
     // filled during analyze(); contains all operands that need to go through
     // distinct aggregation
-    protected final List<SetOperand> distinctOperands_ = Lists.newArrayList();
+    protected final List<SetOperand> distinctOperands = Lists.newArrayList();
 
     // filled during analyze(); contains all operands that can be aggregated with
     // a simple merge without duplicate elimination (also needs to merge the output
     // of the DISTINCT operands)
-    protected final List<SetOperand> allOperands_ = Lists.newArrayList();
+    protected final List<SetOperand> allOperands = Lists.newArrayList();
 
     private AggregateInfo distinctAggInfo;  // only set if we have DISTINCT ops
 
@@ -89,11 +89,11 @@ public class SetOperationStmt extends QueryStmt {
     private String toSqlString;
 
     // true if any of the operands_ references an AnalyticExpr
-    private boolean hasAnalyticExprs_ = false;
+    private boolean hasAnalyticExprs = false;
 
     // List of output expressions produced by the set operation without the ORDER BY portion
     // (if any). Same as resultExprs_ if there is no ORDER BY.
-    private List<Expr> setOpsResultExprs_ = Lists.newArrayList();
+    private List<Expr> setOpsResultExprs = Lists.newArrayList();
 
     // END: Members that need to be reset()
     /////////////////////////////////////////
@@ -114,21 +114,27 @@ public class SetOperationStmt extends QueryStmt {
                 (other.limitElement == null) ? null : other.limitElement.clone());
         operands = Lists.newArrayList();
         if (analyzer != null) {
-            for (SetOperand o: other.distinctOperands_) distinctOperands_.add(o.clone());
-            for (SetOperand o: other.allOperands_) allOperands_.add(o.clone());
-            operands.addAll(distinctOperands_);
-            operands.addAll(allOperands_);
+            for (SetOperand o: other.distinctOperands) {
+                distinctOperands.add(o.clone());
+            }
+            for (SetOperand o: other.allOperands) {
+                allOperands.add(o.clone());
+            }
+            operands.addAll(distinctOperands);
+            operands.addAll(allOperands);
         } else {
-            for (SetOperand operand: other.operands) operands.add(operand.clone());
+            for (SetOperand operand: other.operands) {
+                operands.add(operand.clone());
+            }
         }
         analyzer = other.analyzer;
         distinctAggInfo =
                 (other.distinctAggInfo != null) ? other.distinctAggInfo.clone() : null;
         tupleId = other.tupleId;
         toSqlString = (other.toSqlString != null) ? new String(other.toSqlString) : null;
-        hasAnalyticExprs_ = other.hasAnalyticExprs_;
-        withClause_ = (other.withClause_ != null) ? other.withClause_.clone() : null;
-        setOpsResultExprs_ = Expr.cloneList(other.setOpsResultExprs_);
+        hasAnalyticExprs = other.hasAnalyticExprs;
+        withClause = (other.withClause != null) ? other.withClause.clone() : null;
+        setOpsResultExprs = Expr.cloneList(other.setOpsResultExprs);
     }
 
     @Override
@@ -144,14 +150,16 @@ public class SetOperationStmt extends QueryStmt {
     @Override
     public void reset() {
         super.reset();
-        for (SetOperand op: operands) op.reset();
-        distinctOperands_.clear();
-        allOperands_.clear();
+        for (SetOperand op: operands) {
+            op.reset();
+        }
+        distinctOperands.clear();
+        allOperands.clear();
         distinctAggInfo = null;
         tupleId = null;
         toSqlString = null;
-        hasAnalyticExprs_ = false;
-        setOpsResultExprs_.clear();
+        hasAnalyticExprs = false;
+        setOpsResultExprs.clear();
     }
 
     @Override
@@ -162,20 +170,20 @@ public class SetOperationStmt extends QueryStmt {
     }
 
     public List<SetOperand> getOperands() { return operands; }
-    public List<SetOperand> getDistinctOperands() { return distinctOperands_; }
-    public boolean hasDistinctOps() { return !distinctOperands_.isEmpty(); }
-    public List<SetOperand> getAllOperands() { return allOperands_; }
-    public boolean hasAllOps() { return !allOperands_.isEmpty(); }
+    public List<SetOperand> getDistinctOperands() { return distinctOperands; }
+    public boolean hasDistinctOps() { return !distinctOperands.isEmpty(); }
+    public List<SetOperand> getAllOperands() { return allOperands; }
+    public boolean hasAllOps() { return !allOperands.isEmpty(); }
     public AggregateInfo getDistinctAggInfo() { return distinctAggInfo; }
-    public boolean hasAnalyticExprs() { return hasAnalyticExprs_; }
+    public boolean hasAnalyticExprs() { return hasAnalyticExprs; }
     public TupleId getTupleId() { return tupleId; }
 
     public void removeAllOperands() {
-        operands.removeAll(allOperands_);
-        allOperands_.clear();
+        operands.removeAll(allOperands);
+        allOperands.clear();
     }
 
-    public List<Expr> getSetOpsResultExprs() { return setOpsResultExprs_; }
+    public List<Expr> getSetOpsResultExprs() { return setOpsResultExprs; }
 
     @Override
     public void getTables(Analyzer analyzer, Map<Long, Table> tableMap, Set<String> parentViewNameSet) throws AnalysisException {
@@ -199,7 +207,9 @@ public class SetOperationStmt extends QueryStmt {
      */
     @Override
     public void analyze(Analyzer analyzer) throws UserException {
-        if (isAnalyzed()) return;
+        if (isAnalyzed()) {
+            return;
+        }
         super.analyze(analyzer);
         Preconditions.checkState(operands.size() > 0);
 
@@ -225,10 +235,10 @@ public class SetOperationStmt extends QueryStmt {
         unnestOperands(analyzer);
 
         // Compute hasAnalyticExprs_
-        hasAnalyticExprs_ = false;
+        hasAnalyticExprs = false;
         for (SetOperand op: operands) {
             if (op.hasAnalyticExprs()) {
-                hasAnalyticExprs_ = true;
+                hasAnalyticExprs = true;
                 break;
             }
         }
@@ -246,10 +256,12 @@ public class SetOperationStmt extends QueryStmt {
         createSortInfo(analyzer);
 
         // Create unnested operands' smaps.
-        for (SetOperand operand: operands) setOperandSmap(operand, analyzer);
+        for (SetOperand operand: operands) {
+            setOperandSmap(operand, analyzer);
+        }
 
         // Create distinctAggInfo, if necessary.
-        if (!distinctOperands_.isEmpty()) {
+        if (!distinctOperands.isEmpty()) {
             // Aggregate produces exactly the same tuple as the original setOp stmt.
             ArrayList<Expr> groupingExprs = Expr.cloneList(resultExprs);
             try {
@@ -261,11 +273,15 @@ public class SetOperationStmt extends QueryStmt {
             }
         }
 
-        setOpsResultExprs_ = Expr.cloneList(resultExprs);
-        if (evaluateOrderBy) createSortTupleInfo(analyzer);
+        setOpsResultExprs = Expr.cloneList(resultExprs);
+        if (evaluateOrderBy) {
+            createSortTupleInfo(analyzer);
+        }
         baseTblResultExprs = resultExprs;
 
-        if (hasOutFileClause()) outFileClause.analyze(analyzer, resultExprs);
+        if (hasOutFileClause()) {
+            outFileClause.analyze(analyzer, resultExprs);
+        }
     }
 
     /**
@@ -296,7 +312,7 @@ public class SetOperationStmt extends QueryStmt {
     private void unnestOperands(Analyzer analyzer) throws AnalysisException {
         if (operands.size() == 1) {
             // ValuesStmt for a single row.
-            allOperands_.add(operands.get(0));
+            allOperands.add(operands.get(0));
             return;
         }
         // find index of first ALL operand
@@ -313,23 +329,27 @@ public class SetOperationStmt extends QueryStmt {
         Preconditions.checkState(firstAllIdx != 1);
 
         // unnest DISTINCT operands
-        Preconditions.checkState(distinctOperands_.isEmpty());
+        Preconditions.checkState(distinctOperands.isEmpty());
         for (int i = 0; i < firstAllIdx; ++i) {
-            unnestOperand(distinctOperands_, Qualifier.DISTINCT, operands.get(i));
+            unnestOperand(distinctOperands, Qualifier.DISTINCT, operands.get(i));
         }
 
         // unnest ALL operands
-        Preconditions.checkState(allOperands_.isEmpty());
+        Preconditions.checkState(allOperands.isEmpty());
         for (int i = firstAllIdx; i < operands.size(); ++i) {
-            unnestOperand(allOperands_, Qualifier.ALL, operands.get(i));
+            unnestOperand(allOperands, Qualifier.ALL, operands.get(i));
         }
 
-        for (SetOperand op: distinctOperands_) op.setQualifier(Qualifier.DISTINCT);
-        for (SetOperand op: allOperands_) op.setQualifier(Qualifier.ALL);
+        for (SetOperand op: distinctOperands) {
+            op.setQualifier(Qualifier.DISTINCT);
+        }
+        for (SetOperand op: allOperands) {
+            op.setQualifier(Qualifier.ALL);
+        }
 
         operands.clear();
-        operands.addAll(distinctOperands_);
-        operands.addAll(allOperands_);
+        operands.addAll(distinctOperands);
+        operands.addAll(allOperands);
     }
 
     /**
@@ -487,10 +507,16 @@ public class SetOperationStmt extends QueryStmt {
                 if (slotRef == null) {
                     isNullable |= resultExpr.isNullable();
                 } else if (slotRef.getDesc().getIsNullable()
-                        || analyzer.isOuterJoined(slotRef.getDesc().getParent().getId())) isNullable = true;
-                if (op.hasAnalyticExprs()) continue;
+                        || analyzer.isOuterJoined(slotRef.getDesc().getParent().getId())) {
+                    isNullable = true;
+                }
+                if (op.hasAnalyticExprs()) {
+                    continue;
+                }
                 slotRef = resultExpr.unwrapSlotRef(true);
-                if (slotRef == null) continue;
+                if (slotRef == null) {
+                    continue;
+                }
                 // analyzer.registerValueTransfer(outputSlotRef.getSlotId(), slotRef.getSlotId());
             }
             // If all the child slots are not nullable, then the SetOps output slot should not
@@ -510,16 +536,22 @@ public class SetOperationStmt extends QueryStmt {
         TupleDescriptor tupleDesc = analyzer.getDescTbl().getTupleDesc(tupleId);
         // to keep things simple we materialize all grouping exprs = output slots,
         // regardless of what's being referenced externally
-        if (!distinctOperands_.isEmpty()) tupleDesc.materializeSlots();
+        if (!distinctOperands.isEmpty()) {
+            tupleDesc.materializeSlots();
+        }
 
-        if (evaluateOrderBy) sortInfo.materializeRequiredSlots(analyzer, null);
+        if (evaluateOrderBy) {
+            sortInfo.materializeRequiredSlots(analyzer, null);
+        }
 
         // collect operands' result exprs
         List<SlotDescriptor> outputSlots = tupleDesc.getSlots();
         List<Expr> exprs = Lists.newArrayList();
         for (int i = 0; i < outputSlots.size(); ++i) {
             SlotDescriptor slotDesc = outputSlots.get(i);
-            if (!slotDesc.isMaterialized()) continue;
+            if (!slotDesc.isMaterialized()) {
+                continue;
+            }
             for (SetOperand op: operands) {
                 exprs.add(op.getQueryStmt().getBaseTblResultExprs().get(i));
             }
@@ -574,7 +606,9 @@ public class SetOperationStmt extends QueryStmt {
 
     @Override
     public void rewriteExprs(ExprRewriter rewriter) throws AnalysisException {
-        for (SetOperand op: operands) op.getQueryStmt().rewriteExprs(rewriter);
+        for (SetOperand op: operands) {
+            op.getQueryStmt().rewriteExprs(rewriter);
+        }
         if (orderByElements != null) {
             for (OrderByElement orderByElem: orderByElements) {
                 orderByElem.setExpr(rewriter.rewrite(orderByElem.getExpr(), analyzer));
@@ -594,13 +628,17 @@ public class SetOperationStmt extends QueryStmt {
 
     @Override
     public void collectTableRefs(List<TableRef> tblRefs) {
-        for (SetOperand op: operands) op.getQueryStmt().collectTableRefs(tblRefs);
+        for (SetOperand op: operands) {
+            op.getQueryStmt().collectTableRefs(tblRefs);
+        }
     }
 
     @Override
     public List<TupleId> collectTupleIds() {
         List<TupleId> result = Lists.newArrayList();
-        for (SetOperand op: operands) result.addAll(op.getQueryStmt().collectTupleIds());
+        for (SetOperand op: operands) {
+            result.addAll(op.getQueryStmt().collectTupleIds());
+        }
         return result;
     }
 
@@ -610,8 +648,8 @@ public class SetOperationStmt extends QueryStmt {
             return toSqlString;
         }
         StringBuilder strBuilder = new StringBuilder();
-        if (withClause_ != null) {
-            strBuilder.append(withClause_.toSql());
+        if (withClause != null) {
+            strBuilder.append(withClause.toSql());
             strBuilder.append(" ");
         }
         Preconditions.checkState(operands.size() > 0);
@@ -661,8 +699,8 @@ public class SetOperationStmt extends QueryStmt {
     @Override
     public String toDigest() {
         StringBuilder strBuilder = new StringBuilder();
-        if (withClause_ != null) {
-            strBuilder.append(withClause_.toDigest());
+        if (withClause != null) {
+            strBuilder.append(withClause.toDigest());
             strBuilder.append(" ");
         }
 
@@ -744,7 +782,7 @@ public class SetOperationStmt extends QueryStmt {
 
         // Effective qualifier. Should not be reset() to preserve changes made during
         // distinct propagation and unnesting that are needed after rewriting Subqueries.
-        private Qualifier qualifier_;
+        private Qualifier qualifier;
 
         // ///////////////////////////////////////
         // BEGIN: Members that need to be reset()
@@ -756,7 +794,7 @@ public class SetOperationStmt extends QueryStmt {
         private Analyzer analyzer;
 
         // Map from SetOperationStmt's result slots to our resultExprs. Used during plan generation.
-        private final ExprSubstitutionMap smap_;
+        private final ExprSubstitutionMap smap;
 
         // END: Members that need to be reset()
         // ///////////////////////////////////////
@@ -764,8 +802,8 @@ public class SetOperationStmt extends QueryStmt {
         public SetOperand(QueryStmt queryStmt, Operation operation, Qualifier qualifier) {
             this.queryStmt = queryStmt;
             this.operation = operation;
-            qualifier_ = qualifier;
-            smap_ = new ExprSubstitutionMap();
+            this.qualifier = qualifier;
+            smap = new ExprSubstitutionMap();
         }
 
         public void analyze(Analyzer parent) throws AnalysisException, UserException {
@@ -774,7 +812,7 @@ public class SetOperationStmt extends QueryStmt {
             }
             // a UNION statement supports const exprs directly, so only non-UNION operands need this rewrite
             if (operation != Operation.UNION && queryStmt instanceof SelectStmt
-                    && ((SelectStmt) queryStmt).fromClause_.isEmpty()) {
+                    && ((SelectStmt) queryStmt).fromClause.isEmpty()) {
                 // Rewrite 'select 1' as 'select * from (select 1) __DORIS_DUAL__'. A bare 'select 1' would be
                 // transformed into a union node; the literal has no tuple but still produces a slot,
                 // which would make the BE core dump.
@@ -792,14 +830,14 @@ public class SetOperationStmt extends QueryStmt {
                                 .set(i, new SelectListItem(item.getExpr(), col + "_" + count.toString()));
                     }
                 }
-                ((SelectStmt) queryStmt).fromClause_.add(new InlineViewRef("__DORIS_DUAL__", inlineQuery));
+                ((SelectStmt) queryStmt).fromClause.add(new InlineViewRef("__DORIS_DUAL__", inlineQuery));
                 List<SelectListItem> slist = ((SelectStmt) queryStmt).selectList.getItems();
                 slist.clear();
                 slist.add(SelectListItem.createStarItem(null));
             }
             // Oracle and MS SQL Server do not support INTERSECT ALL and EXCEPT ALL; PostgreSQL supports them,
             // but their semantics are ambiguous, so they are rejected here as well.
-            if (qualifier_ == Qualifier.ALL && (operation == Operation.EXCEPT || operation == Operation.INTERSECT)) {
+            if (qualifier == Qualifier.ALL && (operation == Operation.EXCEPT || operation == Operation.INTERSECT)) {
                 throw new AnalysisException("INTERSECT and EXCEPT does not support ALL qualifier.");
             }
             analyzer = new Analyzer(parent);
@@ -808,12 +846,12 @@ public class SetOperationStmt extends QueryStmt {
 
         public boolean isAnalyzed() { return analyzer != null; }
         public QueryStmt getQueryStmt() { return queryStmt; }
-        public Qualifier getQualifier() { return qualifier_; }
+        public Qualifier getQualifier() { return qualifier; }
         public Operation getOperation() {
             return operation;
         }
         // Used for propagating DISTINCT.
-        public void setQualifier(Qualifier qualifier) { qualifier_ = qualifier; }
+        public void setQualifier(Qualifier qualifier) { this.qualifier = qualifier; }
 
         public void setOperation(Operation operation) {
             this.operation = operation;
@@ -822,7 +860,7 @@ public class SetOperationStmt extends QueryStmt {
             this.queryStmt = queryStmt;
         }
         public Analyzer getAnalyzer() { return analyzer; }
-        public ExprSubstitutionMap getSmap() { return smap_; }
+        public ExprSubstitutionMap getSmap() { return smap; }
 
         public boolean hasAnalyticExprs() {
             if (queryStmt instanceof SelectStmt) {
@@ -839,15 +877,15 @@ public class SetOperationStmt extends QueryStmt {
         private SetOperand(SetOperand other) {
             queryStmt = other.queryStmt.clone();
             this.operation = other.operation;
-            qualifier_ = other.qualifier_;
+            qualifier = other.qualifier;
             analyzer = other.analyzer;
-            smap_ = other.smap_.clone();
+            smap = other.smap.clone();
         }
 
         public void reset() {
             queryStmt.reset();
             analyzer = null;
-            smap_.clear();
+            smap.clear();
         }
 
         @Override
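
As a minimal illustration of the operand split that unnestOperands() above performs, the sketch below partitions a list of operands at the first ALL operand: everything before it is treated as DISTINCT, everything from it onwards stays ALL. The class, enum and variable names are hypothetical; the real method additionally unnests nested SetOperationStmts and relies on earlier DISTINCT propagation.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class OperandSplitSketch {
        enum Qualifier { DISTINCT, ALL }

        public static void main(String[] args) {
            List<Qualifier> operands = Arrays.asList(
                    Qualifier.DISTINCT, Qualifier.DISTINCT, Qualifier.ALL, Qualifier.ALL);
            // find index of first ALL operand (defaults to the end if there is none)
            int firstAllIdx = operands.size();
            for (int i = 0; i < operands.size(); ++i) {
                if (operands.get(i) == Qualifier.ALL) {
                    firstAllIdx = i;
                    break;
                }
            }
            List<Qualifier> distinctOperands = new ArrayList<>(operands.subList(0, firstAllIdx));
            List<Qualifier> allOperands = new ArrayList<>(operands.subList(firstAllIdx, operands.size()));
            System.out.println("distinct=" + distinctOperands + ", all=" + allOperands);
        }
    }
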
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowStreamLoadStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowStreamLoadStmt.java
index 961b760807..d9bc8ac25f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowStreamLoadStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowStreamLoadStmt.java
@@ -51,9 +51,9 @@ public class ShowStreamLoadStmt extends ShowStmt {
     }
 
     private String dbName;
-    private Expr whereClause;
-    private LimitElement limitElement;
-    private List<OrderByElement> orderByElements;
+    private final Expr whereClause;
+    private final LimitElement limitElement;
+    private final List<OrderByElement> orderByElements;
 
     private String labelValue;
     private String stateValue;
@@ -61,7 +61,7 @@ public class ShowStreamLoadStmt extends ShowStmt {
 
     private ArrayList<OrderByPair> orderByPairs;
 
-    private ImmutableList<String> TITLE_NAMES = new ImmutableList.Builder<String>()
+    private static final ImmutableList<String> TITLE_NAMES = new ImmutableList.Builder<String>()
             .add("Label").add("Db").add("Table").add("User")
             .add("ClientIp").add("Status").add("Message").add("Url").add("TotalRows")
             .add("LoadedRows").add("FilteredRows").add("UnselectedRows").add("LoadBytes")
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotDescriptor.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotDescriptor.java
index 373b88b26d..e516894eda 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotDescriptor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotDescriptor.java
@@ -41,11 +41,11 @@ public class SlotDescriptor {
     private Column column;  // underlying column, if there is one
 
     // for SlotRef.toSql() in the absence of a path
-    private String label_;
+    private String label;
 
     // Expr(s) materialized into this slot; multiple exprs for unions. Should be empty if
     // path_ is set.
-    private List<Expr> sourceExprs_ = Lists.newArrayList();
+    private List<Expr> sourceExprs = Lists.newArrayList();
 
     // if false, this slot doesn't need to be materialized in parent tuple
     // (and physical layout parameters are invalid)
@@ -213,12 +213,12 @@ public class SlotDescriptor {
         return slotOffset;
     }
 
-    public String getLabel() { return label_; }
-    public void setLabel(String label) { label_ = label; }
-    public void setSourceExprs(List<Expr> exprs) { sourceExprs_ = exprs; }
-    public void setSourceExpr(Expr expr) { sourceExprs_ = Collections.singletonList(expr); }
-    public void addSourceExpr(Expr expr) { sourceExprs_.add(expr); }
-    public List<Expr> getSourceExprs() { return sourceExprs_; }
+    public String getLabel() { return label; }
+    public void setLabel(String label) { this.label = label; }
+    public void setSourceExprs(List<Expr> exprs) { sourceExprs = exprs; }
+    public void setSourceExpr(Expr expr) { sourceExprs = Collections.singletonList(expr); }
+    public void addSourceExpr(Expr expr) { sourceExprs.add(expr); }
+    public List<Expr> getSourceExprs() { return sourceExprs; }
 
 
     /**
@@ -227,7 +227,7 @@ public class SlotDescriptor {
     public void initFromExpr(Expr expr) {
         setIsNullable(expr.isNullable());
         setLabel(expr.toSql());
-        Preconditions.checkState(sourceExprs_.isEmpty());
+        Preconditions.checkState(sourceExprs.isEmpty());
         setSourceExpr(expr);
         setStats(ColumnStats.fromExpr(expr));
         Preconditions.checkState(expr.getType().isValid());
@@ -238,13 +238,25 @@ public class SlotDescriptor {
      * Return true if the physical layout of this descriptor matches the physical layout
      * of the other descriptor, but not necessarily ids.
      */
-    public boolean LayoutEquals(SlotDescriptor other) {
-        if (!getType().equals(other.getType())) return false;
-        if (isNullable != other.isNullable) return false;
-        if (getByteSize() != other.getByteSize()) return false;
-        if (getByteOffset() != other.getByteOffset()) return false;
-        if (getNullIndicatorByte() != other.getNullIndicatorByte()) return false;
-        if (getNullIndicatorBit() != other.getNullIndicatorBit()) return false;
+    public boolean layoutEquals(SlotDescriptor other) {
+        if (!getType().equals(other.getType())) {
+            return false;
+        }
+        if (isNullable != other.isNullable) {
+            return false;
+        }
+        if (getByteSize() != other.getByteSize()) {
+            return false;
+        }
+        if (getByteOffset() != other.getByteOffset()) {
+            return false;
+        }
+        if (getNullIndicatorByte() != other.getNullIndicatorByte()) {
+            return false;
+        }
+        if (getNullIndicatorBit() != other.getNullIndicatorBit()) {
+            return false;
+        }
         return true;
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotId.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotId.java
index 370eb0ebd4..6854fe2d71 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotId.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotId.java
@@ -31,9 +31,9 @@ public class SlotId extends Id<SlotId> {
     public static IdGenerator<SlotId> createGenerator() {
         return new IdGenerator<SlotId>() {
             @Override
-            public SlotId getNextId() { return new SlotId(nextId_++); }
+            public SlotId getNextId() { return new SlotId(nextId++); }
             @Override
-            public SlotId getMaxId() { return new SlotId(nextId_ - 1); }
+            public SlotId getMaxId() { return new SlotId(nextId - 1); }
         };
     }
 }
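
The SlotId generator above (and the identical TupleId generator further down) follows a simple counter-based id-generator pattern. A self-contained sketch with made-up class names, mirroring only the getNextId()/getMaxId() behaviour visible in the diff:

    public class IdGeneratorSketch {
        static final class Id {
            private final int value;
            Id(int value) { this.value = value; }
            @Override
            public String toString() { return "Id(" + value + ")"; }
        }

        static final class Generator {
            private int nextId = 0;
            Id getNextId() { return new Id(nextId++); }   // hand out the next id and advance the counter
            Id getMaxId() { return new Id(nextId - 1); }  // highest id handed out so far
        }

        public static void main(String[] args) {
            Generator gen = new Generator();
            System.out.println(gen.getNextId());  // Id(0)
            System.out.println(gen.getNextId());  // Id(1)
            System.out.println(gen.getMaxId());   // Id(1)
        }
    }
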
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotRef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotRef.java
index 788b81f55c..636fbc991e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotRef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SlotRef.java
@@ -318,7 +318,9 @@ public class SlotRef extends Expr {
     public boolean isBoundByTupleIds(List<TupleId> tids) {
         Preconditions.checkState(desc != null);
         for (TupleId tid: tids) {
-            if (tid.equals(desc.getParent().getId())) return true;
+            if (tid.equals(desc.getParent().getId())) {
+                return true;
+            }
         }
         return false;
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SortInfo.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SortInfo.java
index 128a88459b..09a2b2b3b1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SortInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SortInfo.java
@@ -49,41 +49,41 @@ public class SortInfo {
     // TODO: rethink this when we have a better cost model.
     private static final float SORT_MATERIALIZATION_COST_THRESHOLD = Expr.FUNCTION_CALL_COST;
 
-    private List<Expr> orderingExprs_;
-    private final List<Boolean> isAscOrder_;
+    private List<Expr> orderingExprs;
+    private final List<Boolean> isAscOrder;
     // True if "NULLS FIRST", false if "NULLS LAST", null if not specified.
-    private final List<Boolean> nullsFirstParams_;
+    private final List<Boolean> nullsFirstParams;
     // Subset of ordering exprs that are materialized. Populated in
     // createMaterializedOrderExprs(), used for EXPLAIN output.
-    private List<Expr> materializedOrderingExprs_;
+    private List<Expr> materializedOrderingExprs;
     // The single tuple that is materialized, sorted, and output by a sort operator
     // (i.e. SortNode or TopNNode)
-    private TupleDescriptor sortTupleDesc_;
+    private TupleDescriptor sortTupleDesc;
     // Input expressions materialized into sortTupleDesc. One expr per slot in
     // sortTupleDesc.
-    private List<Expr> sortTupleSlotExprs_;
+    private List<Expr> sortTupleSlotExprs;
 
     public SortInfo(List<Expr> orderingExprs, List<Boolean> isAscOrder,
                     List<Boolean> nullsFirstParams) {
         Preconditions.checkArgument(orderingExprs.size() == isAscOrder.size());
         Preconditions.checkArgument(orderingExprs.size() == nullsFirstParams.size());
-        orderingExprs_ = orderingExprs;
-        isAscOrder_ = isAscOrder;
-        nullsFirstParams_ = nullsFirstParams;
-        materializedOrderingExprs_ = Lists.newArrayList();
+        this.orderingExprs = orderingExprs;
+        this.isAscOrder = isAscOrder;
+        this.nullsFirstParams = nullsFirstParams;
+        materializedOrderingExprs = Lists.newArrayList();
     }
 
     /**
      * C'tor for cloning.
      */
     private SortInfo(SortInfo other) {
-        orderingExprs_ = Expr.cloneList(other.orderingExprs_);
-        isAscOrder_ = Lists.newArrayList(other.isAscOrder_);
-        nullsFirstParams_ = Lists.newArrayList(other.nullsFirstParams_);
-        materializedOrderingExprs_ = Expr.cloneList(other.materializedOrderingExprs_);
-        sortTupleDesc_ = other.sortTupleDesc_;
-        if (other.sortTupleSlotExprs_ != null) {
-            sortTupleSlotExprs_ = Expr.cloneList(other.sortTupleSlotExprs_);
+        orderingExprs = Expr.cloneList(other.orderingExprs);
+        isAscOrder = Lists.newArrayList(other.isAscOrder);
+        nullsFirstParams = Lists.newArrayList(other.nullsFirstParams);
+        materializedOrderingExprs = Expr.cloneList(other.materializedOrderingExprs);
+        sortTupleDesc = other.sortTupleDesc;
+        if (other.sortTupleSlotExprs != null) {
+            sortTupleSlotExprs = Expr.cloneList(other.sortTupleSlotExprs);
         }
     }
 
@@ -95,31 +95,31 @@ public class SortInfo {
     public void setMaterializedTupleInfo(
             TupleDescriptor tupleDesc, List<Expr> tupleSlotExprs) {
         Preconditions.checkState(tupleDesc.getSlots().size() == tupleSlotExprs.size());
-        sortTupleDesc_ = tupleDesc;
-        sortTupleSlotExprs_ = tupleSlotExprs;
-        for (int i = 0; i < sortTupleDesc_.getSlots().size(); ++i) {
-            SlotDescriptor slotDesc = sortTupleDesc_.getSlots().get(i);
-            slotDesc.setSourceExpr(sortTupleSlotExprs_.get(i));
+        sortTupleDesc = tupleDesc;
+        sortTupleSlotExprs = tupleSlotExprs;
+        for (int i = 0; i < sortTupleDesc.getSlots().size(); ++i) {
+            SlotDescriptor slotDesc = sortTupleDesc.getSlots().get(i);
+            slotDesc.setSourceExpr(sortTupleSlotExprs.get(i));
         }
     }
 
-    public List<Expr> getOrderingExprs() { return orderingExprs_; }
-    public List<Boolean> getIsAscOrder() { return isAscOrder_; }
-    public List<Boolean> getNullsFirstParams() { return nullsFirstParams_; }
-    public List<Expr> getMaterializedOrderingExprs() { return materializedOrderingExprs_; }
-    public List<Expr> getSortTupleSlotExprs() { return sortTupleSlotExprs_; }
-    public TupleDescriptor getSortTupleDescriptor() { return sortTupleDesc_; }
+    public List<Expr> getOrderingExprs() { return orderingExprs; }
+    public List<Boolean> getIsAscOrder() { return isAscOrder; }
+    public List<Boolean> getNullsFirstParams() { return nullsFirstParams; }
+    public List<Expr> getMaterializedOrderingExprs() { return materializedOrderingExprs; }
+    public List<Expr> getSortTupleSlotExprs() { return sortTupleSlotExprs; }
+    public TupleDescriptor getSortTupleDescriptor() { return sortTupleDesc; }
 
     /**
      * Gets the list of booleans indicating whether nulls come first or last, independent
      * of asc/desc.
      */
     public List<Boolean> getNullsFirst() {
-        Preconditions.checkState(orderingExprs_.size() == nullsFirstParams_.size());
+        Preconditions.checkState(orderingExprs.size() == nullsFirstParams.size());
         List<Boolean> nullsFirst = Lists.newArrayList();
-        for (int i = 0; i < orderingExprs_.size(); ++i) {
-            nullsFirst.add(OrderByElement.nullsFirst(nullsFirstParams_.get(i),
-                    isAscOrder_.get(i)));
+        for (int i = 0; i < orderingExprs.size(); ++i) {
+            nullsFirst.add(OrderByElement.nullsFirst(nullsFirstParams.get(i),
+                    isAscOrder.get(i)));
         }
         return nullsFirst;
     }
@@ -130,15 +130,15 @@ public class SortInfo {
      * applying the 'smap'.
      */
     public void materializeRequiredSlots(Analyzer analyzer, ExprSubstitutionMap smap) {
-        Preconditions.checkNotNull(sortTupleDesc_);
-        Preconditions.checkNotNull(sortTupleSlotExprs_);
-        Preconditions.checkState(sortTupleDesc_.isMaterialized());
-        analyzer.materializeSlots(orderingExprs_);
-        List<SlotDescriptor> sortTupleSlotDescs = sortTupleDesc_.getSlots();
+        Preconditions.checkNotNull(sortTupleDesc);
+        Preconditions.checkNotNull(sortTupleSlotExprs);
+        Preconditions.checkState(sortTupleDesc.isMaterialized());
+        analyzer.materializeSlots(orderingExprs);
+        List<SlotDescriptor> sortTupleSlotDescs = sortTupleDesc.getSlots();
         List<Expr> materializedExprs = Lists.newArrayList();
         for (int i = 0; i < sortTupleSlotDescs.size(); ++i) {
             if (sortTupleSlotDescs.get(i).isMaterialized()) {
-                materializedExprs.add(sortTupleSlotExprs_.get(i));
+                materializedExprs.add(sortTupleSlotExprs.get(i));
             }
         }
         List<Expr> substMaterializedExprs =
@@ -147,15 +147,15 @@ public class SortInfo {
     }
 
     public void substituteOrderingExprs(ExprSubstitutionMap smap, Analyzer analyzer) {
-        orderingExprs_ = Expr.substituteList(orderingExprs_, smap, analyzer, false);
+        orderingExprs = Expr.substituteList(orderingExprs, smap, analyzer, false);
     }
 
     /**
      * Asserts that all ordering exprs are bound by the sort tuple.
      */
     public void checkConsistency() {
-        for (Expr orderingExpr: orderingExprs_) {
-            Preconditions.checkState(orderingExpr.isBound(sortTupleDesc_.getId()));
+        for (Expr orderingExpr: orderingExprs) {
+            Preconditions.checkState(orderingExpr.isBound(sortTupleDesc.getId()));
         }
     }
 
@@ -193,7 +193,7 @@ public class SortInfo {
         Set<SlotRef> sourceSlots = Sets.newHashSet();
         TreeNode.collect(Expr.substituteList(resultExprs, substOrderBy, analyzer, false),
                 Predicates.instanceOf(SlotRef.class), sourceSlots);
-        TreeNode.collect(Expr.substituteList(orderingExprs_, substOrderBy, analyzer, false),
+        TreeNode.collect(Expr.substituteList(orderingExprs, substOrderBy, analyzer, false),
                 Predicates.instanceOf(SlotRef.class), sourceSlots);
         for (SlotRef origSlotRef: sourceSlots) {
             // Don't rematerialize slots that are already in the sort tuple.
@@ -231,7 +231,7 @@ public class SortInfo {
     public ExprSubstitutionMap createMaterializedOrderExprs(
             TupleDescriptor sortTupleDesc, Analyzer analyzer) {
         ExprSubstitutionMap substOrderBy = new ExprSubstitutionMap();
-        for (Expr origOrderingExpr : orderingExprs_) {
+        for (Expr origOrderingExpr : orderingExprs) {
             // TODO(zc): support materialized order exprs
             // if (!origOrderingExpr.hasCost()
             //         || origOrderingExpr.getCost() > SORT_MATERIALIZATION_COST_THRESHOLD
@@ -250,7 +250,7 @@ public class SortInfo {
                 materializedDesc.setIsMaterialized(true);
                 SlotRef materializedRef = new SlotRef(materializedDesc);
                 substOrderBy.put(origOrderingExpr, materializedRef);
-                materializedOrderingExprs_.add(origOrderingExpr);
+                materializedOrderingExprs.add(origOrderingExpr);
             }
         }
         return substOrderBy;
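
getNullsFirst() above resolves the optional NULLS FIRST / NULLS LAST flag for each ordering expression. The sketch below shows that resolution in isolation; the default applied when no clause is given is an assumption made for illustration (NULLs treated as the smallest values), not necessarily the exact default of OrderByElement.nullsFirst():

    public class NullsFirstSketch {
        // Resolve the per-column nulls-first flag from an optional NULLS FIRST/LAST clause.
        // Assumed default for illustration: NULLs sort as the smallest values, i.e. first on
        // ascending order and last on descending order.
        static boolean resolveNullsFirst(Boolean nullsFirstParam, boolean isAsc) {
            if (nullsFirstParam != null) {
                return nullsFirstParam;  // an explicit NULLS FIRST / NULLS LAST always wins
            }
            return isAsc;
        }

        public static void main(String[] args) {
            System.out.println(resolveNullsFirst(null, true));           // true  (assumed default, ASC)
            System.out.println(resolveNullsFirst(null, false));          // false (assumed default, DESC)
            System.out.println(resolveNullsFirst(Boolean.FALSE, true));  // false (explicit NULLS LAST)
        }
    }
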
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StatementBase.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StatementBase.java
index 897c5a7b6d..92ffaa6bbf 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StatementBase.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StatementBase.java
@@ -72,7 +72,9 @@ public abstract class StatementBase implements ParseNode {
      * tables/views get collected in the Analyzer before failing analyze().
      */
     public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
-        if (isAnalyzed()) return;
+        if (isAnalyzed()) {
+            return;
+        }
         this.analyzer = analyzer;
         if (Strings.isNullOrEmpty(analyzer.getClusterName())) {
             ErrorReport.reportAnalysisException(ErrorCode.ERR_CLUSTER_NO_SELECT_CLUSTER);
@@ -129,7 +131,9 @@ public abstract class StatementBase implements ParseNode {
      */
     public void setColLabels(List<String> colLabels) {
         List<String> oldLabels = getColLabels();
-        if (oldLabels == colLabels) return;
+        if (oldLabels == colLabels) {
+            return;
+        }
         oldLabels.clear();
         oldLabels.addAll(colLabels);
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StmtRewriter.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StmtRewriter.java
index 8ce0529714..64695fd6bb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StmtRewriter.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StmtRewriter.java
@@ -92,7 +92,7 @@ public class StmtRewriter {
             throws AnalysisException {
         SelectStmt result = stmt;
         // Rewrite all the subqueries in the FROM clause.
-        for (TableRef tblRef : result.fromClause_) {
+        for (TableRef tblRef : result.fromClause) {
             if (!(tblRef instanceof InlineViewRef)) {
                 continue;
             }
@@ -118,7 +118,7 @@ public class StmtRewriter {
                 && result.getHavingClauseAfterAnaylzed().getSubquery() != null) {
             result = rewriteHavingClauseSubqueries(result, analyzer);
         }
-        result.sqlString_ = null;
+        result.sqlString = null;
         if (LOG.isDebugEnabled()) {
             LOG.debug("rewritten stmt: " + result.toSql());
         }
@@ -403,7 +403,7 @@ public class StmtRewriter {
     private static void rewriteWhereClauseSubqueries(
             SelectStmt stmt, Analyzer analyzer)
             throws AnalysisException {
-        int numTableRefs = stmt.fromClause_.size();
+        int numTableRefs = stmt.fromClause.size();
         ArrayList<Expr> exprsWithSubqueries = Lists.newArrayList();
         ExprSubstitutionMap smap = new ExprSubstitutionMap();
         // Check if all the conjuncts in the WHERE clause that contain subqueries
@@ -613,9 +613,9 @@ public class StmtRewriter {
         } catch (UserException e) {
             throw new AnalysisException(e.getMessage());
         }
-        inlineView.setLeftTblRef(stmt.fromClause_.get(stmt.fromClause_.size() - 1));
+        inlineView.setLeftTblRef(stmt.fromClause.get(stmt.fromClause.size() - 1));
 
-        stmt.fromClause_.add(inlineView);
+        stmt.fromClause.add(inlineView);
         JoinOperator joinOp = JoinOperator.LEFT_SEMI_JOIN;
 
         // Create a join conjunct from the expr that contains a subquery.
@@ -786,7 +786,7 @@ public class StmtRewriter {
      * replacing an unqualified star item.
      */
     private static void replaceUnqualifiedStarItems(SelectStmt stmt, int tableIdx) {
-        Preconditions.checkState(tableIdx < stmt.fromClause_.size());
+        Preconditions.checkState(tableIdx < stmt.fromClause.size());
         ArrayList<SelectListItem> newItems = Lists.newArrayList();
         for (int i = 0; i < stmt.selectList.getItems().size(); ++i) {
             SelectListItem item = stmt.selectList.getItems().get(i);
@@ -797,7 +797,7 @@ public class StmtRewriter {
             // '*' needs to be replaced by tbl1.*,...,tbln.*, where
             // tbl1,...,tbln are the visible tableRefs in stmt.
             for (int j = 0; j < tableIdx; ++j) {
-                TableRef tableRef = stmt.fromClause_.get(j);
+                TableRef tableRef = stmt.fromClause.get(j);
                 if (tableRef.getJoinOp() == JoinOperator.LEFT_SEMI_JOIN ||
                         tableRef.getJoinOp() == JoinOperator.LEFT_ANTI_JOIN) {
                     continue;
@@ -1171,8 +1171,8 @@ public class StmtRewriter {
         }
         SelectStmt selectStmt = (SelectStmt) statementBase;
         boolean reAnalyze = false;
-        for (int i = 0; i < selectStmt.fromClause_.size(); i++) {
-            TableRef tableRef = selectStmt.fromClause_.get(i);
+        for (int i = 0; i < selectStmt.fromClause.size(); i++) {
+            TableRef tableRef = selectStmt.fromClause.get(i);
             // Recursively rewrite subquery
             if (tableRef instanceof InlineViewRef) {
                 InlineViewRef viewRef = (InlineViewRef) tableRef;
@@ -1203,7 +1203,7 @@ public class StmtRewriter {
                     null,
                     null,
                     LimitElement.NO_LIMIT);
-            selectStmt.fromClause_.set(i, new InlineViewRef(tableRef.getAliasAsName().getTbl(), stmt));
+            selectStmt.fromClause.set(i, new InlineViewRef(tableRef.getAliasAsName().getTbl(), stmt));
             selectStmt.analyze(analyzer);
             reAnalyze = true;
         }
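
The StmtRewriter changes above keep the existing behaviour: a WHERE-clause subquery is turned into an inline view appended to the FROM clause and connected with a LEFT SEMI JOIN. The strings below only sketch the shape of that transformation for a simple IN-subquery; the table and column names are invented and NULL-handling subtleties are ignored:

    public class SemiJoinRewriteSketch {
        public static void main(String[] args) {
            String before = "SELECT a FROM t1 WHERE t1.b IN (SELECT b FROM t2)";
            String after = "SELECT a FROM t1 LEFT SEMI JOIN (SELECT b FROM t2) v ON t1.b = v.b";
            System.out.println("before: " + before);
            System.out.println("after:  " + after);
        }
    }
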
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Subquery.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/Subquery.java
index c29a2ada22..8365bbbc16 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Subquery.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Subquery.java
@@ -109,7 +109,9 @@ public class Subquery extends Expr {
         }
 
         // If the subquery returns many rows, set its type to MultiRowType.
-        if (!((SelectStmt)stmt).returnsSingleRow()) type = new MultiRowType(type);
+        if (!((SelectStmt)stmt).returnsSingleRow()) {
+            type = new MultiRowType(type);
+        }
 
         // Preconditions.checkNotNull(type);
         // type.analyze();
@@ -138,7 +140,9 @@ public class Subquery extends Expr {
         // Check if we have unique labels
         List<String> labels = stmt.getColLabels();
         boolean hasUniqueLabels = true;
-        if (Sets.newHashSet(labels).size() != labels.size()) hasUniqueLabels = false;
+        if (Sets.newHashSet(labels).size() != labels.size()) {
+            hasUniqueLabels = false;
+        }
 
         // Construct a StructField from each expr in the select list
         for (int i = 0; i < stmtResultExprs.size(); ++i) {
@@ -182,7 +186,9 @@ public class Subquery extends Expr {
      */
     @Override
     public boolean equals(Object o) {
-        if (!super.equals(o)) return false;
+        if (!super.equals(o)) {
+            return false;
+        }
         return stmt.toSql().equals(((Subquery)o).stmt.toSql());
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
index 7bf3790584..f64af5449f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TableRef.java
@@ -86,10 +86,10 @@ public class TableRef implements ParseNode, Writable {
     // analysis. By convention, for table refs with multiple implicit aliases, aliases[0]
     // contains the fully-qualified implicit alias to ensure that aliases[0] always
     // uniquely identifies this table ref regardless of whether it has an explicit alias.
-    protected String[] aliases_;
+    protected String[] aliases;
 
     // Indicates whether this table ref is given an explicit alias,
-    protected boolean hasExplicitAlias_;
+    protected boolean hasExplicitAlias;
 
     protected JoinOperator joinOp;
     protected List<String> usingColNames;
@@ -115,14 +115,14 @@ public class TableRef implements ParseNode, Writable {
     // we may alter the chain of table refs during plan generation, but we still rely
     // on the original list of ids for correct predicate assignment.
     // Populated in analyzeJoin().
-    protected List<TupleId> allTableRefIds_ = Lists.newArrayList();
-    protected List<TupleId> allMaterializedTupleIds_ = Lists.newArrayList();
+    protected List<TupleId> allTableRefIds = Lists.newArrayList();
+    protected List<TupleId> allMaterializedTupleIds = Lists.newArrayList();
 
     // All physical tuple ids that this table ref is correlated with:
     // Tuple ids of root descriptors from outer query blocks that this table ref
     // (if a CollectionTableRef) or contained CollectionTableRefs (if an InlineViewRef)
     // are rooted at. Populated during analysis.
-    protected List<TupleId> correlatedTupleIds_ = Lists.newArrayList();
+    protected List<TupleId> correlatedTupleIds = Lists.newArrayList();
 
     // analysis output
     protected TupleDescriptor desc;
@@ -153,10 +153,10 @@ public class TableRef implements ParseNode, Writable {
             if (Catalog.isStoredTableNamesLowerCase()) {
                 alias = alias.toLowerCase();
             }
-            aliases_ = new String[]{alias};
-            hasExplicitAlias_ = true;
+            aliases = new String[]{alias};
+            hasExplicitAlias = true;
         } else {
-            hasExplicitAlias_ = false;
+            hasExplicitAlias = false;
         }
         this.partitionNames = partitionNames;
         this.commonHints = commonHints;
@@ -167,8 +167,8 @@ public class TableRef implements ParseNode, Writable {
     // this will reset all the 'analyzed' stuff
     protected TableRef(TableRef other) {
         name = other.name;
-        aliases_ = other.aliases_;
-        hasExplicitAlias_ = other.hasExplicitAlias_;
+        aliases = other.aliases;
+        hasExplicitAlias = other.hasExplicitAlias;
         joinOp = other.joinOp;
         // NOTE: joinHints and sortHints may be changed after clone, so we create new lists.
         joinHints =
@@ -185,9 +185,9 @@ public class TableRef implements ParseNode, Writable {
         // table refs is the responsibility of the statement.
         leftTblRef = null;
         isAnalyzed = other.isAnalyzed;
-        allTableRefIds_ = Lists.newArrayList(other.allTableRefIds_);
-        allMaterializedTupleIds_ = Lists.newArrayList(other.allMaterializedTupleIds_);
-        correlatedTupleIds_ = Lists.newArrayList(other.correlatedTupleIds_);
+        allTableRefIds = Lists.newArrayList(other.allTableRefIds);
+        allMaterializedTupleIds = Lists.newArrayList(other.allMaterializedTupleIds);
+        correlatedTupleIds = Lists.newArrayList(other.correlatedTupleIds);
         desc = other.desc;
         lateralViewRefs = null;
         if (other.lateralViewRefs != null) {
@@ -297,7 +297,7 @@ public class TableRef implements ParseNode, Writable {
      * an outer query block.
      */
     public boolean isCorrelated() {
-        return !correlatedTupleIds_.isEmpty();
+        return !correlatedTupleIds.isEmpty();
     }
 
     public Table getTable() {
@@ -440,14 +440,14 @@ public class TableRef implements ParseNode, Writable {
         analyzeJoinHints();
 
         // Populate the lists of all table ref and materialized tuple ids.
-        allTableRefIds_.clear();
-        allMaterializedTupleIds_.clear();
+        allTableRefIds.clear();
+        allMaterializedTupleIds.clear();
         if (leftTblRef != null) {
-            allTableRefIds_.addAll(leftTblRef.getAllTableRefIds());
-            allMaterializedTupleIds_.addAll(leftTblRef.getAllMaterializedTupleIds());
+            allTableRefIds.addAll(leftTblRef.getAllTableRefIds());
+            allMaterializedTupleIds.addAll(leftTblRef.getAllMaterializedTupleIds());
         }
-        allTableRefIds_.add(getId());
-        allMaterializedTupleIds_.addAll(getMaterializedTupleIds());
+        allTableRefIds.add(getId());
+        allMaterializedTupleIds.addAll(getMaterializedTupleIds());
 
         if (usingColNames != null) {
             // Turn USING clause into equivalent ON clause.
@@ -620,7 +620,7 @@ public class TableRef implements ParseNode, Writable {
      */
     public List<TupleId> getAllTableRefIds() {
         Preconditions.checkState(isAnalyzed);
-        return allTableRefIds_;
+        return allTableRefIds;
     }
 
     /**
@@ -713,7 +713,7 @@ public class TableRef implements ParseNode, Writable {
      * Returns all legal aliases of this table ref.
      */
     public String[] getAliases() {
-        return aliases_;
+        return aliases;
     }
 
     /**
@@ -722,7 +722,7 @@ public class TableRef implements ParseNode, Writable {
      * be ambiguous).
      */
     public String getUniqueAlias() {
-        return aliases_[0];
+        return aliases[0];
     }
 
     /**
@@ -731,14 +731,16 @@ public class TableRef implements ParseNode, Writable {
      * nested collection refs have only a single implicit alias.
      */
     public boolean hasExplicitAlias() {
-        return hasExplicitAlias_;
+        return hasExplicitAlias;
     }
 
     /**
      * Returns the explicit alias if this table ref has one, null otherwise.
      */
     public String getExplicitAlias() {
-        if (hasExplicitAlias()) return getUniqueAlias();
+        if (hasExplicitAlias()) {
+            return getUniqueAlias();
+        }
         return null;
     }
 
@@ -787,9 +789,9 @@ public class TableRef implements ParseNode, Writable {
             onClause.reset();
         }
         leftTblRef = null;
-        allTableRefIds_.clear();
-        allMaterializedTupleIds_.clear();
-        correlatedTupleIds_.clear();
+        allTableRefIds.clear();
+        allMaterializedTupleIds.clear();
+        correlatedTupleIds.clear();
         desc = null;
         if (lateralViewRefs != null) {
             for (LateralViewRef lateralViewRef : lateralViewRefs) {
@@ -814,8 +816,8 @@ public class TableRef implements ParseNode, Writable {
         if (partitionNames != null) {
             sb.append(partitionNames.toSql());
         }
-        if (aliases_ != null && aliases_.length > 0) {
-            sb.append(" AS ").append(aliases_[0]);
+        if (aliases != null && aliases.length > 0) {
+            sb.append(" AS ").append(aliases[0]);
         }
         return sb.toString();
     }
@@ -847,7 +849,7 @@ public class TableRef implements ParseNode, Writable {
 
         if (in.readBoolean()) {
             String alias = Text.readString(in);
-            aliases_ = new String[]{alias};
+            aliases = new String[]{alias};
         }
     }
 }
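
A small sketch of the alias convention described in the TableRef comments above: aliases[0] is always the unique alias of a table ref, and an explicit alias is only reported when one was actually given. The helper and its names are hypothetical:

    public class AliasConventionSketch {
        static String explicitAliasOrNull(String[] aliases, boolean hasExplicitAlias) {
            if (hasExplicitAlias) {
                return aliases[0];  // an explicit alias is stored as the single, unique entry
            }
            return null;  // only implicit aliases exist, so there is no explicit alias to report
        }

        public static void main(String[] args) {
            System.out.println(explicitAliasOrNull(new String[] {"db1.t1", "t1"}, false));  // null
            System.out.println(explicitAliasOrNull(new String[] {"t_alias"}, true));        // t_alias
        }
    }
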
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleDescriptor.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleDescriptor.java
index 45fab98fa8..d5fc90ff10 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleDescriptor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleDescriptor.java
@@ -50,12 +50,12 @@ public class TupleDescriptor {
     private TableRef ref;
 
     // All legal aliases of this tuple.
-    private String[] aliases_;
+    private String[] aliases;
 
     // If true, requires that aliases.length == 1. However, aliases.length == 1
     // does not imply an explicit alias because nested collection refs have only a
     // single implicit alias.
-    private boolean hasExplicitAlias_;
+    private boolean hasExplicitAlias;
 
     // if false, this tuple doesn't need to be materialized
     private boolean isMaterialized = true;
@@ -97,8 +97,8 @@ public class TupleDescriptor {
         return ref;
     }
 
-    public void setRef(TableRef new_ref) {
-        ref = new_ref;
+    public void setRef(TableRef tableRef) {
+        ref = tableRef;
     }
 
     public ArrayList<SlotDescriptor> getSlots() {
@@ -131,7 +131,9 @@ public class TupleDescriptor {
     public ArrayList<SlotDescriptor> getMaterializedSlots() {
         ArrayList<SlotDescriptor> result = Lists.newArrayList();
         for (SlotDescriptor slot : slots) {
-            if (slot.isMaterialized()) result.add(slot);
+            if (slot.isMaterialized()) {
+                result.add(slot);
+            }
         }
         return result;
     }
@@ -175,13 +177,13 @@ public class TupleDescriptor {
     }
 
     public void setAliases(String[] aliases, boolean hasExplicitAlias) {
-        aliases_ = aliases;
-        hasExplicitAlias_ = hasExplicitAlias;
+        this.aliases = aliases;
+        this.hasExplicitAlias = hasExplicitAlias;
     }
-    public boolean hasExplicitAlias() { return hasExplicitAlias_; }
-    public String getAlias() { return (aliases_ != null) ? aliases_[0] : null; }
+    public boolean hasExplicitAlias() { return hasExplicitAlias; }
+    public String getAlias() { return (aliases != null) ? aliases[0] : null; }
     public TableName getAliasAsName() {
-        return (aliases_ != null) ? new TableName(null, aliases_[0]) : null;
+        return (aliases != null) ? new TableName(null, aliases[0]) : null;
     }
 
     public TTupleDescriptor toThrift() {
@@ -328,7 +330,9 @@ public class TupleDescriptor {
      * Materialize all slots.
      */
     public void materializeSlots() {
-        for (SlotDescriptor slot: slots) slot.setIsMaterialized(true);
+        for (SlotDescriptor slot: slots) {
+            slot.setIsMaterialized(true);
+        }
     }
 
     public void getTableIdToColumnNames(Map<Long, Set<String>> tableIdToColumnNames) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleId.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleId.java
index 0104c090f8..a3424ea467 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleId.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleId.java
@@ -34,9 +34,9 @@ public class TupleId extends Id<TupleId> {
     public static IdGenerator<TupleId> createGenerator() {
         return new IdGenerator<TupleId>() {
             @Override
-            public TupleId getNextId() { return new TupleId(nextId_++); }
+            public TupleId getNextId() { return new TupleId(nextId++); }
             @Override
-            public TupleId getMaxId() { return new TupleId(nextId_ - 1); }
+            public TupleId getMaxId() { return new TupleId(nextId - 1); }
         };
     }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleIsNullPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleIsNullPredicate.java
index f944c1f763..9b67d4cd1a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleIsNullPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TupleIsNullPredicate.java
@@ -31,6 +31,7 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
 import java.util.List;
+import java.util.Objects;
 
 /**
  * Internal expr that returns true if all of the given tuples are NULL, otherwise false.
@@ -64,7 +65,9 @@ public class TupleIsNullPredicate extends Predicate {
     @Override
     public boolean isBoundByTupleIds(List<TupleId> tids) {
         for (TupleId tid : tids) {
-            if (tupleIds.contains(tid)) return true;
+            if (tupleIds.contains(tid)) {
+                return true;
+            }
         }
         return false;
     }
@@ -87,6 +90,10 @@ public class TupleIsNullPredicate extends Predicate {
         return tupleIds;
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode());
+    }
 
     @Override
     public boolean equals(Object o) {
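
This commit adds hashCode() overrides next to existing equals() implementations (TupleIsNullPredicate here, ArrayType, Column and ColumnStats below). A minimal, hypothetical example of the contract these additions satisfy, namely that equal objects must produce equal hash codes:

    import java.util.Objects;

    public class Point {
        private final int x;
        private final int y;

        public Point(int x, int y) {
            this.x = x;
            this.y = y;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Point)) {
                return false;
            }
            Point other = (Point) o;
            return x == other.x && y == other.y;
        }

        @Override
        public int hashCode() {
            // hash exactly the fields compared in equals(), so equal Points hash equally
            return Objects.hash(x, y);
        }
    }
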
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/UserIdentity.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/UserIdentity.java
index a55e3c6786..b87878fa4e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/UserIdentity.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/UserIdentity.java
@@ -45,16 +45,16 @@ import java.io.IOException;
 // cmy@192.168.%
 // cmy@[domain.name]
 public class UserIdentity implements Writable {
-    
+
     @SerializedName(value = "user")
     private String user;
-    
+
     @SerializedName(value = "host")
     private String host;
-    
+
     @SerializedName(value = "isDomain")
     private boolean isDomain;
-    
+
     private boolean isAnalyzed = false;
 
     public static final UserIdentity ROOT;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/WithClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/WithClause.java
index 729bac76a0..eb631146dc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/WithClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/WithClause.java
@@ -56,7 +56,7 @@ public class WithClause implements ParseNode {
     /////////////////////////////////////////
     // BEGIN: Members that need to be reset()
 
-    private final ArrayList<View> views_;
+    private final ArrayList<View> views;
 
     // END: Members that need to be reset()
     /////////////////////////////////////////
@@ -64,7 +64,7 @@ public class WithClause implements ParseNode {
     public WithClause(ArrayList<View> views) {
         Preconditions.checkNotNull(views);
         Preconditions.checkState(!views.isEmpty());
-        views_ = views;
+        this.views = views;
     }
 
     /**
@@ -80,8 +80,10 @@ public class WithClause implements ParseNode {
         // that local views registered in parent blocks are visible here.
         Analyzer withClauseAnalyzer = Analyzer.createWithNewGlobalState(analyzer);
         withClauseAnalyzer.setIsWithClause();
-        if (analyzer.isExplain()) withClauseAnalyzer.setIsExplain();
-        for (View view: views_) {
+        if (analyzer.isExplain()) {
+            withClauseAnalyzer.setIsExplain();
+        }
+        for (View view: views) {
             Analyzer viewAnalyzer = new Analyzer(withClauseAnalyzer);
             view.getQueryStmt().analyze(viewAnalyzer);
             // Register this view so that the next view can reference it.
@@ -98,19 +100,21 @@ public class WithClause implements ParseNode {
      */
     private WithClause(WithClause other) {
         Preconditions.checkNotNull(other);
-        views_ = Lists.newArrayList();
-        for (View view: other.views_) {
-            views_.add(new View(view.getName(), view.getQueryStmt().clone(),
+        views = Lists.newArrayList();
+        for (View view: other.views) {
+            views.add(new View(view.getName(), view.getQueryStmt().clone(),
                     view.getOriginalColLabels()));
         }
     }
 
     public void reset() {
-        for (View view: views_) view.getQueryStmt().reset();
+        for (View view: views) {
+            view.getQueryStmt().reset();
+        }
     }
 
     public void getTables(Analyzer analyzer, Map<Long, Table> tableMap, Set<String> parentViewNameSet) throws AnalysisException {
-        for (View view : views_) {
+        for (View view : views) {
             QueryStmt stmt = view.getQueryStmt();
             parentViewNameSet.add(view.getName());
             stmt.getTables(analyzer, tableMap, parentViewNameSet);
@@ -118,7 +122,7 @@ public class WithClause implements ParseNode {
     }
 
     public void getTableRefs(Analyzer analyzer, List<TableRef> tblRefs, Set<String> parentViewNameSet) {
-        for (View view : views_) {
+        for (View view : views) {
             QueryStmt stmt = view.getQueryStmt();
             parentViewNameSet.add(view.getName());
             stmt.getTableRefs(analyzer, tblRefs, parentViewNameSet);
@@ -131,7 +135,7 @@ public class WithClause implements ParseNode {
     @Override
     public String toSql() {
         List<String> viewStrings = Lists.newArrayList();
-        for (View view: views_) {
+        for (View view: views) {
             // Enclose the view alias and explicit labels in quotes if Hive cannot parse it
             // without quotes. This is needed for view compatibility between Impala and Hive.
             String aliasSql = ToSqlUtils.getIdentSql(view.getName());
@@ -146,7 +150,7 @@ public class WithClause implements ParseNode {
 
     public String toDigest() {
         List<String> viewStrings = Lists.newArrayList();
-        for (View view : views_) {
+        for (View view : views) {
             // Enclose the view alias and explicit labels in quotes if Hive cannot parse it
             // without quotes. This is needed for view compatibility between Impala and Hive.
             String aliasSql = ToSqlUtils.getIdentSql(view.getName());
@@ -159,5 +163,5 @@ public class WithClause implements ParseNode {
         return "WITH " + Joiner.on(",").join(viewStrings);
     }
 
-    public List<View> getViews() { return views_; }
+    public List<View> getViews() { return views; }
 }
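
The WithClause.analyze() loop above registers each view after analyzing it so that later views in the same WITH clause can reference it. A standalone sketch of that ordering rule, with invented view names:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class WithClauseOrderSketch {
        public static void main(String[] args) {
            // CTE names in declaration order; each view may only reference views registered before it.
            List<String> viewsInOrder = Arrays.asList("v1", "v2", "v3");
            Set<String> visible = new HashSet<>();
            for (String view : viewsInOrder) {
                System.out.println("analyzing " + view + ", visible views: " + visible);
                visible.add(view);  // register so the views that follow can reference it
            }
        }
    }
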
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/HDFSStorage.java b/fe/fe-core/src/main/java/org/apache/doris/backup/HdfsStorage.java
similarity index 97%
rename from fe/fe-core/src/main/java/org/apache/doris/backup/HDFSStorage.java
rename to fe/fe-core/src/main/java/org/apache/doris/backup/HdfsStorage.java
index 8544cf3a47..5db9b0810c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/HDFSStorage.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/HdfsStorage.java
@@ -22,7 +22,7 @@ import org.apache.doris.common.UserException;
 import java.util.Map;
 
 // TODO: extend BlobStorage
-public class HDFSStorage {
+public class HdfsStorage {
     public static final String HDFS_DEFAULT_FS = "fs.defaultFS";
 
     public static void checkHDFS(Map<String, String> properties) throws UserException {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateType.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateType.java
index 5445253cac..ec25ade6ee 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateType.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateType.java
@@ -87,13 +87,13 @@ public enum AggregateType {
 
         primitiveTypeList.clear();
         // all types except the object-stored column types, such as BITMAP, HLL and QUANTILE_STATE.
-        EnumSet<PrimitiveType> exc_object_stored = EnumSet.allOf(PrimitiveType.class);
-        exc_object_stored.remove(PrimitiveType.BITMAP);
-        exc_object_stored.remove(PrimitiveType.HLL);
-        exc_object_stored.remove(PrimitiveType.QUANTILE_STATE);
-        compatibilityMap.put(REPLACE, EnumSet.copyOf(exc_object_stored));
+        EnumSet<PrimitiveType> excObjectStored = EnumSet.allOf(PrimitiveType.class);
+        excObjectStored.remove(PrimitiveType.BITMAP);
+        excObjectStored.remove(PrimitiveType.HLL);
+        excObjectStored.remove(PrimitiveType.QUANTILE_STATE);
+        compatibilityMap.put(REPLACE, EnumSet.copyOf(excObjectStored));
 
-        compatibilityMap.put(REPLACE_IF_NOT_NULL, EnumSet.copyOf(exc_object_stored));
+        compatibilityMap.put(REPLACE_IF_NOT_NULL, EnumSet.copyOf(excObjectStored));
 
         primitiveTypeList.clear();
         primitiveTypeList.add(PrimitiveType.HLL);
@@ -107,7 +107,7 @@ public enum AggregateType {
         primitiveTypeList.add(PrimitiveType.QUANTILE_STATE);
         compatibilityMap.put(QUANTILE_UNION, EnumSet.copyOf(primitiveTypeList));
 
-        compatibilityMap.put(NONE, EnumSet.copyOf(exc_object_stored));
+        compatibilityMap.put(NONE, EnumSet.copyOf(excObjectStored));
     }
     private final String sqlName;
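
The AggregateType change above builds the compatibility sets for REPLACE, REPLACE_IF_NOT_NULL and NONE as "all primitive types except the object-stored ones". A standalone sketch of that EnumSet complement pattern, with a made-up enum standing in for PrimitiveType:

    import java.util.EnumSet;

    public class EnumSetComplementSketch {
        enum PrimitiveType { INT, DOUBLE, VARCHAR, BITMAP, HLL, QUANTILE_STATE }

        public static void main(String[] args) {
            // start from every type, then drop the object-stored ones
            EnumSet<PrimitiveType> excObjectStored = EnumSet.allOf(PrimitiveType.class);
            excObjectStored.removeAll(EnumSet.of(
                    PrimitiveType.BITMAP, PrimitiveType.HLL, PrimitiveType.QUANTILE_STATE));
            System.out.println(excObjectStored);  // [INT, DOUBLE, VARCHAR]
        }
    }
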
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/AliasFunction.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/AliasFunction.java
index 0329352d69..447cd705ba 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/AliasFunction.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/AliasFunction.java
@@ -185,6 +185,8 @@ public class AliasFunction extends Function {
                             typeDefParams.add(scalarType.getLenStr());
                         }
                         break;
+                    default:
+                        throw new AnalysisException("Alias type is invalid: " + primitiveType);
                 }
             }
         } else {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/ArrayType.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/ArrayType.java
index f3adcc67ee..bbc517a81a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/ArrayType.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/ArrayType.java
@@ -27,6 +27,8 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.gson.annotations.SerializedName;
 
+import java.util.Objects;
+
 /**
  * Describes an ARRAY type.
  */
@@ -90,6 +92,11 @@ public class ArrayType extends Type {
         return String.format("ARRAY<%s>", itemType.toSql(depth + 1));
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(itemType);
+    }
+
     @Override
     public boolean equals(Object other) {
         if (!(other instanceof ArrayType)) {
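
The new hashCode() override in ArrayType, and the ones added to the classes below, all follow the same rule: a class that overrides equals() must hash the same fields it compares. A small self-contained sketch of that contract, using a hypothetical Point class instead of a catalog type:

    import java.util.HashSet;
    import java.util.Objects;
    import java.util.Set;

    public class HashCodeSketch {
        static final class Point {
            final int x;
            final int y;

            Point(int x, int y) {
                this.x = x;
                this.y = y;
            }

            @Override
            public boolean equals(Object o) {
                if (this == o) {
                    return true;
                }
                if (!(o instanceof Point)) {
                    return false;
                }
                Point p = (Point) o;
                return x == p.x && y == p.y;
            }

            @Override
            public int hashCode() {
                // Hash exactly the fields used by equals().
                return Objects.hash(x, y);
            }
        }

        public static void main(String[] args) {
            Set<Point> set = new HashSet<>();
            set.add(new Point(1, 2));
            // Without a matching hashCode(), this lookup could miss the entry.
            System.out.println(set.contains(new Point(1, 2))); // true
        }
    }
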
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
index 69fe069258..1458f63c38 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
@@ -2440,6 +2440,7 @@ public class Catalog {
                             LOG.error(msg);
                             Util.stdoutWithTime(msg);
                             System.exit(-1);
+                            break;
                         }
                         default:
                             break;
@@ -5262,7 +5263,7 @@ public class Catalog {
     public EsRepository getEsRepository() {
         return this.esRepository;
     }
-    
+
     public PolicyMgr getPolicyMgr() {
         return this.policyMgr;
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java
index 2da813b79f..9d6144fede 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java
@@ -43,6 +43,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  * This class represents the column-related metadata.
@@ -526,6 +527,13 @@ public class Column implements Writable {
         return toSql();
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(name, getDataType(), aggregationType, isAggregationTypeImplicit,
+                isKey, isAllowNull, getDefaultValue(), getStrLen(), getPrecision(), getScale(),
+                comment, visible, children);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (obj == this) {
@@ -644,10 +652,13 @@ public class Column implements Writable {
                 break;
             case ARRAY:
                 sb.append(type.toString());
+                break;
             case MAP:
                 sb.append(type.toString());
+                break;
             case STRUCT:
                 sb.append(type.toString());
+                break;
             default:
                 sb.append(typeStringMap.get(dataType));
                 break;
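
Note that the Column change above is a behavior fix, not only style: without the added break statements, the ARRAY case fell through into the MAP and STRUCT cases (and then the default), appending the type string more than once. A tiny illustrative sketch of that fall-through behavior, with made-up names:

    public class FallThroughSketch {
        enum Shape { ARRAY, MAP, STRUCT }

        static String describeBuggy(Shape s) {
            StringBuilder sb = new StringBuilder();
            switch (s) {
                case ARRAY:
                    sb.append("array ");
                    // missing break: control falls through to MAP and STRUCT
                case MAP:
                    sb.append("map ");
                case STRUCT:
                    sb.append("struct ");
                default:
                    break;
            }
            return sb.toString().trim();
        }

        public static void main(String[] args) {
            // Prints "array map struct" instead of just "array".
            System.out.println(describeBuggy(Shape.ARRAY));
        }
    }
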
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/ColumnStats.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/ColumnStats.java
index 441c9af726..6a7a852946 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/ColumnStats.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/ColumnStats.java
@@ -30,6 +30,7 @@ import org.apache.logging.log4j.Logger;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Objects;
 
 /**
  * Statistics for a single column.
@@ -138,6 +139,11 @@ public class ColumnStats implements Writable {
         return columnStats;
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(avgSerializedSize, maxSize, numDistinctValues, numNulls);
+    }
+
     public boolean equals(Object obj) {
         if (this == obj) {
             return true;
@@ -176,9 +182,13 @@ public class ColumnStats implements Writable {
       ColumnStats stats = new ColumnStats(expr.getType().getPrimitiveType());
       stats.setNumDistinctValues(expr.getNumDistinctValues());
       SlotRef slotRef = expr.unwrapSlotRef();
-      if (slotRef == null) return stats;
+      if (slotRef == null) {
+          return stats;
+      }
       ColumnStats slotStats = slotRef.getDesc().getStats();
-      if (slotStats == null) return stats;
+      if (slotStats == null) {
+          return stats;
+      }
       stats.numNulls = slotStats.getNumNulls();
       stats.avgSerializedSize = slotStats.getAvgSerializedSize();
       stats.maxSize = slotStats.getMaxSize();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/DataProperty.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/DataProperty.java
index 250cbbb332..5340e7a89d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/DataProperty.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/DataProperty.java
@@ -30,6 +30,7 @@ import com.google.gson.annotations.SerializedName;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Objects;
 
 public class DataProperty implements Writable {
     public static final DataProperty DEFAULT_DATA_PROPERTY = new DataProperty(
@@ -109,6 +110,11 @@ public class DataProperty implements Writable {
         remoteCooldownTimeMs = MAX_COOLDOWN_TIME_MS;
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(storageMedium, cooldownTimeMs, remoteStorageResourceName, remoteCooldownTimeMs);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (this == obj) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java
index 3c3534b5e0..945315d391 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java
@@ -49,6 +49,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
@@ -681,6 +682,11 @@ public class Database extends MetaObject implements Writable {
         }
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(id, fullQualifiedName, dataQuotaBytes);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (this == obj) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Function.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Function.java
index c1caa4313c..49ba2b6f36 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Function.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Function.java
@@ -600,7 +600,7 @@ public class Function implements Writable {
         }
 
         public static FunctionType fromCode(int code) {
-            switch (code) {
+            switch (code) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                 case 0:
                     return ORIGIN;
                 case 1:
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/FunctionSet.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/FunctionSet.java
index 2193317f93..4b94052643 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/FunctionSet.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/FunctionSet.java
@@ -41,7 +41,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-public class FunctionSet<min_initIN9doris_udf12DecimalV2ValEEEvPNS2_15FunctionContextEPT_> {
+public class FunctionSet<T> {
     private static final Logger LOG = LogManager.getLogger(FunctionSet.class);
 
     // All of the registered user functions. The key is the user facing name (e.g. "myUdf"),
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcTable.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcTable.java
index ac92ca9cb5..9c69ef787f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/OdbcTable.java
@@ -80,8 +80,9 @@ public class OdbcTable extends Table {
         switch (tableType) {
             case MYSQL:
                 return mysqlProperName(name);
+            default:
+                return name;
         }
-        return name;
     }
 
     private String odbcCatalogResourceName;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Partition.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Partition.java
index abf23a3203..75e64b6292 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Partition.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Partition.java
@@ -244,6 +244,7 @@ public class Partition extends MetaObject implements Writable {
                 break;
             case SHADOW:
                 indices.addAll(idToShadowIndex.values());
+                break;
             default:
                 break;
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/PartitionKey.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/PartitionKey.java
index 72c79a95d4..3b44365626 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/PartitionKey.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/PartitionKey.java
@@ -189,16 +189,16 @@ public class PartitionKey implements Comparable<PartitionKey>, Writable {
     // compare with other PartitionKey. used for partition prune
     @Override
     public int compareTo(PartitionKey other) {
-        int this_key_len = this.keys.size();
-        int other_key_len = other.keys.size();
-        int min_len = Math.min(this_key_len, other_key_len);
-        for (int i = 0; i < min_len; ++i) {
+        int thisKeyLen = this.keys.size();
+        int otherKeyLen = other.keys.size();
+        int minLen = Math.min(thisKeyLen, otherKeyLen);
+        for (int i = 0; i < minLen; ++i) {
             int ret = compareLiteralExpr(this.getKeys().get(i), other.getKeys().get(i));
             if (0 != ret) {
                 return ret;
             }
         }
-        return Integer.compare(this_key_len, other_key_len);
+        return Integer.compare(thisKeyLen, otherKeyLen);
     }
 
     // return: ("100", "200", "300")
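
The renamed variables in PartitionKey.compareTo implement an element-wise comparison followed by a length comparison, essentially a lexicographic order over key lists. A standalone sketch of the same logic over plain integer lists, not Doris literal expressions:

    import java.util.Arrays;
    import java.util.List;

    public class KeyCompareSketch {
        static int compareKeys(List<Integer> a, List<Integer> b) {
            int minLen = Math.min(a.size(), b.size());
            // Compare the positions both keys share first.
            for (int i = 0; i < minLen; ++i) {
                int ret = Integer.compare(a.get(i), b.get(i));
                if (ret != 0) {
                    return ret;
                }
            }
            // All shared positions equal: the shorter key sorts first.
            return Integer.compare(a.size(), b.size());
        }

        public static void main(String[] args) {
            System.out.println(compareKeys(Arrays.asList(1, 2), Arrays.asList(1, 2, 3))); // negative
            System.out.println(compareKeys(Arrays.asList(1, 3), Arrays.asList(1, 2, 3))); // positive
        }
    }
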
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/ReplicaAllocation.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/ReplicaAllocation.java
index 6b02270fad..80ad94349a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/ReplicaAllocation.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/ReplicaAllocation.java
@@ -99,8 +99,12 @@ public class ReplicaAllocation implements Writable {
 
     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
         ReplicaAllocation that = (ReplicaAllocation) o;
         return that.allocMap.equals(this.allocMap);
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/ResourceType.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/ResourceType.java
index d687b457f8..d66054be43 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/ResourceType.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/ResourceType.java
@@ -100,8 +100,9 @@ public enum ResourceType {
                 return HDD_READ_MBPS;
             case TRESOURCE_HDD_WRITE_MBPS:
                 return HDD_WRITE_MBPS;
+            default:
+                return null;
         }
-        return null;
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/ScalarFunction.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/ScalarFunction.java
index 527e81357a..299160cc43 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/ScalarFunction.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/ScalarFunction.java
@@ -175,7 +175,9 @@ public class ScalarFunction extends Function {
             }
         }
         String beClass = usesDecimal ? "DecimalOperators" : "Operators";
-        if (usesDecimalV2) beClass = "DecimalV2Operators";
+        if (usesDecimalV2) {
+            beClass = "DecimalV2Operators";
+        }
         String symbol = "doris::" + beClass + "::" + beFn;
         return createBuiltinOperator(name, symbol, argTypes, retType, nullableMode);
     }
@@ -245,7 +247,9 @@ public class ScalarFunction extends Function {
             }
         }
         String beClass = usesDecimal ? "DecimalOperators" : "Operators";
-        if (usesDecimalV2) beClass = "DecimalV2Operators";
+        if (usesDecimalV2) {
+            beClass = "DecimalV2Operators";
+        }
         String symbol = "doris::" + beClass + "::" + beFn;
         return createVecBuiltinOperator(name, symbol, argTypes, retType, nullableMode);
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Type.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Type.java
index e23302d96c..758e65e8d4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Type.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Type.java
@@ -480,7 +480,9 @@ public abstract class Type {
      * MAP<STRING,STRUCT<f1:INT>> --> 3
      */
     private boolean exceedsMaxNestingDepth(int d) {
-        if (d >= MAX_NESTING_DEPTH) return true;
+        if (d >= MAX_NESTING_DEPTH) {
+            return true;
+        }
         if (isStructType()) {
             StructType structType = (StructType) this;
             for (StructField f : structType.getFields()) {
@@ -579,7 +581,7 @@ public abstract class Type {
         TTypeNode node = col.getTypes().get(nodeIdx);
         Type type = null;
         int tmpNodeIdx = nodeIdx;
-        switch (node.getType()) {
+        switch (node.getType()) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case SCALAR: {
                 Preconditions.checkState(node.isSetScalarType());
                 TScalarType scalarType = node.getScalarType();
@@ -661,8 +663,12 @@ public abstract class Type {
      * Null is returned for for data types where the column size is not applicable.
      */
     public Integer getColumnSize() {
-        if (!isScalarType()) return null;
-        if (isNumericType()) return getPrecision();
+        if (!isScalarType()) {
+            return null;
+        }
+        if (isNumericType()) {
+            return getPrecision();
+        }
         ScalarType t = (ScalarType) this;
         switch (t.getPrimitiveType()) {
             case CHAR:
@@ -711,7 +717,9 @@ public abstract class Type {
      * For non-numeric types, returns null.
      */
     public Integer getPrecision() {
-        if (!isScalarType()) return null;
+        if (!isScalarType()) {
+            return null;
+        }
         ScalarType t = (ScalarType) this;
         switch (t.getPrimitiveType()) {
             case TINYINT:
@@ -742,7 +750,9 @@ public abstract class Type {
      * component.
      */
     public Integer getDecimalDigits() {
-        if (!isScalarType()) return null;
+        if (!isScalarType()) {
+            return null;
+        }
         ScalarType t = (ScalarType) this;
         switch (t.getPrimitiveType()) {
             case BOOLEAN:
@@ -775,7 +785,9 @@ public abstract class Type {
      * types where NUM_PREC_RADIX is not applicable.
      */
     public Integer getNumPrecRadix() {
-        if (!isScalarType()) return null;
+        if (!isScalarType()) {
+            return null;
+        }
         ScalarType t = (ScalarType) this;
         switch (t.getPrimitiveType()) {
             case TINYINT:
@@ -1046,15 +1058,27 @@ public abstract class Type {
                 PrimitiveType t1 = PrimitiveType.values()[i];
                 PrimitiveType t2 = PrimitiveType.values()[j];
                 // DECIMAL, NULL, and INVALID_TYPE  are handled separately.
-                if (t1 == PrimitiveType.INVALID_TYPE ||
-                        t2 == PrimitiveType.INVALID_TYPE) continue;
-                if (t1 == PrimitiveType.NULL_TYPE || t2 == PrimitiveType.NULL_TYPE) continue;
-                if (t1 == PrimitiveType.ARRAY || t2 == PrimitiveType.ARRAY) continue;
-                if (t1 == PrimitiveType.DECIMALV2 || t2 == PrimitiveType.DECIMALV2) continue;
-                if (t1 == PrimitiveType.TIME || t2 == PrimitiveType.TIME) continue;
-                if (t1 == PrimitiveType.ARRAY || t2 == PrimitiveType.ARRAY) continue;
-                if (t1 == PrimitiveType.MAP || t2 == PrimitiveType.MAP) continue;
-                if (t1 == PrimitiveType.STRUCT || t2 == PrimitiveType.STRUCT) continue;
+                if (t1 == PrimitiveType.INVALID_TYPE || t2 == PrimitiveType.INVALID_TYPE) {
+                    continue;
+                }
+                if (t1 == PrimitiveType.NULL_TYPE || t2 == PrimitiveType.NULL_TYPE) {
+                    continue;
+                }
+                if (t1 == PrimitiveType.DECIMALV2 || t2 == PrimitiveType.DECIMALV2) {
+                    continue;
+                }
+                if (t1 == PrimitiveType.TIME || t2 == PrimitiveType.TIME) {
+                    continue;
+                }
+                if (t1 == PrimitiveType.ARRAY || t2 == PrimitiveType.ARRAY) {
+                    continue;
+                }
+                if (t1 == PrimitiveType.MAP || t2 == PrimitiveType.MAP) {
+                    continue;
+                }
+                if (t1 == PrimitiveType.STRUCT || t2 == PrimitiveType.STRUCT) {
+                    continue;
+                }
                 Preconditions.checkNotNull(compatibilityMatrix[i][j]);
             }
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/View.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/View.java
index 08c7e359ff..c842776257 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/View.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/View.java
@@ -85,7 +85,7 @@ public class View extends Table {
     private boolean isLocalView;
 
     // Set if this View is from a WITH clause with column labels.
-    private List<String> colLabels_;
+    private List<String> colLabels;
 
     // Used for read from image
     public View() {
@@ -106,7 +106,7 @@ public class View extends Table {
         super(-1, alias, TableType.VIEW, null);
         this.isLocalView = true;
         this.queryStmt = queryStmt;
-        colLabels_ = colLabels;
+        this.colLabels = colLabels;
     }
 
     public boolean isLocalView() {
@@ -188,7 +188,7 @@ public class View extends Table {
     /**
      * Returns the column labels the user specified in the WITH-clause.
      */
-    public List<String> getOriginalColLabels() { return colLabels_; }
+    public List<String> getOriginalColLabels() { return colLabels; }
 
     /**
      * Returns the explicit column labels for this view, or null if they need to be derived
@@ -197,17 +197,19 @@ public class View extends Table {
      */
     public List<String> getColLabels() {
         QueryStmt stmt = getQueryStmt();
-        if (colLabels_ == null) return null;
-        if (colLabels_.size() >= stmt.getColLabels().size()) {
-            return colLabels_;
+        if (colLabels == null) {
+            return null;
+        }
+        if (colLabels.size() >= stmt.getColLabels().size()) {
+            return colLabels;
         }
-        List<String> explicitColLabels = Lists.newArrayList(colLabels_);
-        explicitColLabels.addAll(stmt.getColLabels().subList(colLabels_.size(), stmt.getColLabels().size()));
+        List<String> explicitColLabels = Lists.newArrayList(colLabels);
+        explicitColLabels.addAll(stmt.getColLabels().subList(colLabels.size(), stmt.getColLabels().size()));
         return explicitColLabels;
     }
 
     public boolean hasColLabels() {
-        return colLabels_ != null;
+        return colLabels != null;
     }
 
     // Get the md5 of signature string of this view.
diff --git a/fe/fe-core/src/main/java/org/apache/doris/clone/DynamicPartitionScheduler.java b/fe/fe-core/src/main/java/org/apache/doris/clone/DynamicPartitionScheduler.java
index 761404e6ef..4728803fc7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/clone/DynamicPartitionScheduler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/clone/DynamicPartitionScheduler.java
@@ -80,7 +80,7 @@ public class DynamicPartitionScheduler extends MasterDaemon {
     public static final String CREATE_PARTITION_MSG = "createPartitionMsg";
     public static final String DROP_PARTITION_MSG = "dropPartitionMsg";
 
-    private final String DEFAULT_RUNTIME_VALUE = FeConstants.null_string;
+    private static final String DEFAULT_RUNTIME_VALUE = FeConstants.null_string;
 
     private Map<Long, Map<String, String>> runtimeInfos = Maps.newConcurrentMap();
     private Set<Pair<Long, Long>> dynamicPartitionTableInfo = Sets.newConcurrentHashSet();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/clone/TwoDimensionalGreedyRebalanceAlgo.java b/fe/fe-core/src/main/java/org/apache/doris/clone/TwoDimensionalGreedyRebalanceAlgo.java
index 2ce36a371a..3590246355 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/clone/TwoDimensionalGreedyRebalanceAlgo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/clone/TwoDimensionalGreedyRebalanceAlgo.java
@@ -67,8 +67,12 @@ public class TwoDimensionalGreedyRebalanceAlgo {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
             PartitionMove that = (PartitionMove) o;
             return Objects.equal(partitionId, that.partitionId) &&
                     Objects.equal(indexId, that.indexId) &&
@@ -292,9 +296,9 @@ public class TwoDimensionalGreedyRebalanceAlgo {
             moveOneReplica(move.fromBe, move.toBe, newInfo.beByReplicaCount);
 
             skewMap.remove(skew, partitionBalanceInfo);
-            long min_count = newInfo.beByReplicaCount.keySet().first();
-            long max_count = newInfo.beByReplicaCount.keySet().last();
-            skewMap.put(max_count - min_count, newInfo);
+            long minCount = newInfo.beByReplicaCount.keySet().first();
+            long maxCount = newInfo.beByReplicaCount.keySet().last();
+            skewMap.put(maxCount - minCount, newInfo);
         } catch (IllegalStateException e) {
             // If touch IllegalState, the skew map doesn't be modified, so we should rollback the move of beByTotalReplicaCount
             moveOneReplica(move.toBe, move.fromBe, beByTotalReplicaCount);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/IdGenerator.java b/fe/fe-core/src/main/java/org/apache/doris/common/IdGenerator.java
index 1085c297bd..6f025a396c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/IdGenerator.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/IdGenerator.java
@@ -25,7 +25,7 @@ package org.apache.doris.common;
  * Subclasses of Id should be able to create a generator for their Id type.
  */
 public abstract class IdGenerator<IdType extends Id<IdType>> {
-    protected int nextId_ = 0;
+    protected int nextId = 0;
     public abstract IdType getNextId();
     public abstract IdType getMaxId();
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/ThriftServerEventProcessor.java b/fe/fe-core/src/main/java/org/apache/doris/common/ThriftServerEventProcessor.java
index 4acf2fa2d6..253eaf5ee5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/ThriftServerEventProcessor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/ThriftServerEventProcessor.java
@@ -57,7 +57,7 @@ public class ThriftServerEventProcessor implements TServerEventHandler {
         // param input is class org.apache.thrift.protocol.TBinaryProtocol
         TSocket tSocket = null;
         TTransport transport = input.getTransport();
-        switch (thriftServer.getType()) {
+        switch (thriftServer.getType()) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case THREADED_SELECTOR:
                 // class org.apache.thrift.transport.TFramedTransport
                 Preconditions.checkState(transport instanceof TFramedTransport);
@@ -71,6 +71,7 @@ public class ThriftServerEventProcessor implements TServerEventHandler {
                 Preconditions.checkState(transport instanceof TSocket);
                 tSocket = (TSocket) transport;
                 break;
+                // CHECKSTYLE IGNORE THIS LINE
         }
         if (tSocket == null) {
             LOG.warn("fail to get client socket. server type: {}", thriftServer.getType());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/TreeNode.java b/fe/fe-core/src/main/java/org/apache/doris/common/TreeNode.java
index 7529d04fad..22de70a1e3 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/TreeNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/TreeNode.java
@@ -62,7 +62,9 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
      */
     public int numNodes() {
         int numNodes = 1;
-        for (NodeType child: children) numNodes += child.numNodes();
+        for (NodeType child: children) {
+            numNodes += child.numNodes();
+        }
         return numNodes;
     }
 
@@ -82,7 +84,9 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
             matches.add((D) this);
             return;
         }
-        for (NodeType child: children) child.collect(predicate, matches);
+        for (NodeType child: children) {
+            child.collect(predicate, matches);
+        }
     }
 
     /**
@@ -96,7 +100,9 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
             matches.add((D) this);
             return;
         }
-        for (NodeType child: children) child.collect(cl, matches);
+        for (NodeType child: children) {
+            child.collect(cl, matches);
+        }
     }
 
     /**
@@ -106,8 +112,12 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
      */
     public <C extends TreeNode<NodeType>, D extends C> void collectAll(
             Predicate<? super C> predicate, List<D> matches) {
-        if (predicate.apply((C) this)) matches.add((D) this);
-        for (NodeType child: children) child.collectAll(predicate, matches);
+        if (predicate.apply((C) this)) {
+            matches.add((D) this);
+        }
+        for (NodeType child: children) {
+            child.collectAll(predicate, matches);
+        }
     }
 
     /**
@@ -116,7 +126,9 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
      */
     public static <C extends TreeNode<C>, D extends C> void collect(
             Collection<C> nodeList, Predicate<? super C> predicate, Collection<D> matches) {
-        for (C node: nodeList) node.collect(predicate, matches);
+        for (C node: nodeList) {
+            node.collect(predicate, matches);
+        }
     }
 
     /**
@@ -125,7 +137,9 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
      */
     public static <C extends TreeNode<C>, D extends C> void collect(
             Collection<C> nodeList, Class cl, Collection<D> matches) {
-        for (C node: nodeList) node.collect(cl, matches);
+        for (C node: nodeList) {
+            node.collect(cl, matches);
+        }
     }
 
     public boolean contains(Class cl) {
@@ -145,8 +159,14 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
      */
     public <C extends TreeNode<NodeType>> boolean contains(
             Predicate<? super C> predicate) {
-        if (predicate.apply((C) this)) return true;
-        for (NodeType child: children) if (child.contains(predicate)) return true;
+        if (predicate.apply((C) this)) {
+            return true;
+        }
+        for (NodeType child: children) {
+            if (child.contains(predicate)) {
+                return true;
+            }
+        }
         return false;
     }
 
@@ -156,7 +176,11 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
      */
     public static <C extends TreeNode<C>, D extends C> boolean contains(
             Collection<C> nodeList, Predicate<? super C> predicate) {
-        for (C node: nodeList) if (node.contains(predicate)) return true;
+        for (C node: nodeList) {
+            if (node.contains(predicate)) {
+                return true;
+            }
+        }
         return false;
     }
 
@@ -165,7 +189,11 @@ public class TreeNode<NodeType extends TreeNode<NodeType>> {
      */
     public static <C extends TreeNode<C>> boolean contains(
             List<C> nodeList, Class cl) {
-        for (C node: nodeList) if (node.contains(cl)) return true;
+        for (C node: nodeList) {
+            if (node.contains(cl)) {
+                return true;
+            }
+        }
         return false;
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEProcDir.java b/fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEProcDir.java
index 84bf4a7d21..ef9b8ca5a5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEProcDir.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEProcDir.java
@@ -28,8 +28,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
-// SHOW PROC "/bdbje"
-public class BDBJEProcDir implements ProcDirInterface  {
+/**
+ * SHOW PROC "/bdbje".
+ */
+public class BDBJEProcDir implements ProcDirInterface  { // CHECKSTYLE IGNORE THIS LINE: BDBJE should use uppercase
     public static final ImmutableList<String> TITLE_NAMES = new ImmutableList.Builder<String>()
             .add("DbNames").add("JournalNumber").add("Comment").build();
 
@@ -40,7 +42,7 @@ public class BDBJEProcDir implements ProcDirInterface  {
 
     @Override
     public ProcNodeInterface lookup(String dbName) throws AnalysisException {
-        return new BDBJEDatabaseProcDir(dbName);
+        return new BdbjeDatabaseProcDir(dbName);
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEDatabaseProcDir.java b/fe/fe-core/src/main/java/org/apache/doris/common/proc/BdbjeDatabaseProcDir.java
similarity index 92%
rename from fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEDatabaseProcDir.java
rename to fe/fe-core/src/main/java/org/apache/doris/common/proc/BdbjeDatabaseProcDir.java
index 36b568f837..a002fb46eb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEDatabaseProcDir.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/proc/BdbjeDatabaseProcDir.java
@@ -27,13 +27,13 @@ import com.google.common.collect.Lists;
 import java.util.List;
 
 // SHOW PROC "/bdbje/dbname/"
-public class BDBJEDatabaseProcDir implements ProcDirInterface  {
+public class BdbjeDatabaseProcDir implements ProcDirInterface  {
     public static final ImmutableList<String> TITLE_NAMES = new ImmutableList.Builder<String>()
             .add("JournalId").build();
 
     private String dbName;
 
-    public BDBJEDatabaseProcDir(String dbName){
+    public BdbjeDatabaseProcDir(String dbName){
         this.dbName = dbName;
     }
 
@@ -44,7 +44,7 @@ public class BDBJEDatabaseProcDir implements ProcDirInterface  {
 
     @Override
     public ProcNodeInterface lookup(String journalId) throws AnalysisException {
-        return new BDBJEJournalDataProcNode(dbName, Long.valueOf(journalId));
+        return new BdbjeJournalDataProcNode(dbName, Long.valueOf(journalId));
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEJournalDataProcNode.java b/fe/fe-core/src/main/java/org/apache/doris/common/proc/BdbjeJournalDataProcNode.java
similarity index 94%
rename from fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEJournalDataProcNode.java
rename to fe/fe-core/src/main/java/org/apache/doris/common/proc/BdbjeJournalDataProcNode.java
index ab36a0f4d7..c8c197b8ca 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/proc/BDBJEJournalDataProcNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/proc/BdbjeJournalDataProcNode.java
@@ -26,7 +26,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 
 // SHOW PROC "/bdbje/dbname/journalID"
-public class BDBJEJournalDataProcNode implements ProcNodeInterface  {
+public class BdbjeJournalDataProcNode implements ProcNodeInterface  {
 
     public static final ImmutableList<String> TITLE_NAMES = new ImmutableList.Builder<String>()
             .add("JournalId").add("OpType").add("Data").build();
@@ -34,7 +34,7 @@ public class BDBJEJournalDataProcNode implements ProcNodeInterface  {
     private String dbName;
     private Long journalId;
 
-    public BDBJEJournalDataProcNode(String dbName, Long journalId) {
+    public BdbjeJournalDataProcNode(String dbName, Long journalId) {
         this.dbName = dbName;
         this.journalId = journalId;
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/proc/TabletHealthProcDir.java b/fe/fe-core/src/main/java/org/apache/doris/common/proc/TabletHealthProcDir.java
index 66108bac92..bd8ecdb846 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/proc/TabletHealthProcDir.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/proc/TabletHealthProcDir.java
@@ -195,7 +195,7 @@ public class TabletHealthProcDir implements ProcDirInterface {
                                             replicaAlloc, aliveBeIdsInCluster);
                                     res = pair.first;
                                 }
-                                switch (res) {
+                                switch (res) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                                     case HEALTHY:
                                         healthyNum++;
                                         break;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/util/ReflectionUtils.java b/fe/fe-core/src/main/java/org/apache/doris/common/util/ReflectionUtils.java
index 4bdd0d5be5..f51ae5f674 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/util/ReflectionUtils.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/util/ReflectionUtils.java
@@ -79,14 +79,14 @@ public class ReflectionUtils {
      * @param title a string title for the stack trace
      */
     public static void printThreadInfo(PrintWriter stream, String title) {
-        final int STACK_DEPTH = 20;
+        final int stackDepth = 20;
         boolean contention = threadBean.isThreadContentionMonitoringEnabled();
         long[] threadIds = threadBean.getAllThreadIds();
         stream.println("Process Thread Dump: " + title);
         stream.println(threadIds.length + " active threads");
 
         for (long tid: threadIds) {
-            ThreadInfo info = threadBean.getThreadInfo(tid, STACK_DEPTH);
+            ThreadInfo info = threadBean.getThreadInfo(tid, stackDepth);
             if (info == null) {
                 stream.println("  Inactive");
                 continue;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/util/Util.java b/fe/fe-core/src/main/java/org/apache/doris/common/util/Util.java
index abbecd3684..891852d860 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/util/Util.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/util/Util.java
@@ -388,7 +388,7 @@ public class Util {
     // not support encode negative value now
     public static void encodeVarint64(long source, DataOutput out) throws IOException {
         assert source >= 0;
-        short B = 128;
+        short B = 128; // CHECKSTYLE IGNORE THIS LINE
 
         while (source > B) {
             out.write((int)(source & (B - 1) | B));
@@ -401,7 +401,7 @@ public class Util {
     public static long decodeVarint64(DataInput in) throws IOException {
         long result = 0;
         int shift = 0;
-        short B = 128;
+        short B = 128; // CHECKSTYLE IGNORE THIS LINE
 
         while (true) {
             int oneByte = in.readUnsignedByte();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/external/elasticsearch/EsNodeInfo.java b/fe/fe-core/src/main/java/org/apache/doris/external/elasticsearch/EsNodeInfo.java
index 384c291f43..b8c757e7b0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/external/elasticsearch/EsNodeInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/external/elasticsearch/EsNodeInfo.java
@@ -164,8 +164,12 @@ public class EsNodeInfo {
 
     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
 
         EsNodeInfo nodeInfo = (EsNodeInfo) o;
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/httpv2/controller/HelpController.java b/fe/fe-core/src/main/java/org/apache/doris/httpv2/controller/HelpController.java
index 986b141b04..1fa5abf5c8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/httpv2/controller/HelpController.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/httpv2/controller/HelpController.java
@@ -97,14 +97,14 @@ public class HelpController {
             List<String> topics = module.listTopicByCategory(categories.get(0));
 
             if (topics.size() > 0) {
-                List<Map<String, String>> topic_list = new ArrayList<>();
+                List<Map<String, String>> topicList = new ArrayList<>();
                 result.put("topicSize", topics.size());
                 for (String topic : topics) {
                     Map<String, String> top = new HashMap<>();
                     top.put("name", topic);
-                    topic_list.add(top);
+                    topicList.add(top);
                 }
-                result.put("topicdatas", topic_list);
+                result.put("topicdatas", topicList);
             }
 
             List<String> subCategories = module.listCategoryByCategory(categories.get(0));
@@ -119,15 +119,15 @@ public class HelpController {
                 result.put("subdatas", subCate);
             }
         } else {
-            List<Map<String, String>> category_list = new ArrayList<>();
+            List<Map<String, String>> categoryList = new ArrayList<>();
             if (categories.size() > 0) {
                 result.put("categoriesSize", categories.size());
                 for (String cate : categories) {
                     Map<String, String> subMap = new HashMap<>();
                     subMap.put("name", cate);
-                    category_list.add(subMap);
+                    categoryList.add(subMap);
                 }
-                result.put("categoryDatas", category_list);
+                result.put("categoryDatas", categoryList);
             }
         }
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/TableQueryPlanAction.java b/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/TableQueryPlanAction.java
index 6afa9dfd82..302ef1b4e1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/TableQueryPlanAction.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/TableQueryPlanAction.java
@@ -218,26 +218,26 @@ public class TableQueryPlanAction extends RestBaseController {
         UUID uuid = UUID.randomUUID();
         tQueryPlanInfo.query_id = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());
 
-        Map<Long, TTabletVersionInfo> tablet_info = new HashMap<>();
+        Map<Long, TTabletVersionInfo> tabletInfo = new HashMap<>();
         // acquire resolved tablet distribution
         Map<String, Node> tabletRoutings = assemblePrunedPartitions(scanRangeLocations);
         tabletRoutings.forEach((tabletId, node) -> {
             long tablet = Long.parseLong(tabletId);
-            tablet_info.put(tablet, new TTabletVersionInfo(tablet, node.version, 0L /*version hash*/, node.schemaHash));
+            tabletInfo.put(tablet, new TTabletVersionInfo(tablet, node.version, 0L /*version hash*/, node.schemaHash));
         });
-        tQueryPlanInfo.tablet_info = tablet_info;
+        tQueryPlanInfo.tablet_info = tabletInfo;
 
         // serialize TQueryPlanInfo and encode plan with Base64 to string in order to translate by json format
         TSerializer serializer = new TSerializer();
-        String opaqued_query_plan;
+        String opaquedQueryPlan;
         try {
-            byte[] query_plan_stream = serializer.serialize(tQueryPlanInfo);
-            opaqued_query_plan = Base64.getEncoder().encodeToString(query_plan_stream);
+            byte[] queryPlanStream = serializer.serialize(tQueryPlanInfo);
+            opaquedQueryPlan = Base64.getEncoder().encodeToString(queryPlanStream);
         } catch (TException e) {
             throw new DorisHttpException(HttpResponseStatus.INTERNAL_SERVER_ERROR, "TSerializer failed to serialize PlanFragment, reason [ " + e.getMessage() + " ]");
         }
         result.put("partitions", tabletRoutings);
-        result.put("opaqued_query_plan", opaqued_query_plan);
+        result.put("opaqued_query_plan", opaquedQueryPlan);
         result.put("status", 200);
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/manager/NodeAction.java b/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/manager/NodeAction.java
index b5b4a0a693..79a0b8276b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/manager/NodeAction.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/manager/NodeAction.java
@@ -154,11 +154,11 @@ public class NodeAction extends RestBaseController {
     @Getter
     @Setter
     public static class NodeInfo {
-        public List<String> column_names;
+        public List<String> columnNames;
         public List<List<String>> rows;
 
-        public NodeInfo(List<String> column_names, List<List<String>> rows) {
-            this.column_names = column_names;
+        public NodeInfo(List<String> columnNames, List<List<String>> rows) {
+            this.columnNames = columnNames;
             this.rows = rows;
         }
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/HttpUtil.java b/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/HttpUtil.java
index 8def076535..e066c32c23 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/HttpUtil.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/HttpUtil.java
@@ -49,8 +49,9 @@ public class HttpUtil {
         BufferedReader reader = null;
         try {
             reader = request.getReader();
-            while (null != (line = reader.readLine()))
+            while (null != (line = reader.readLine())) {
                 data.append(new String(line.getBytes("utf-8")));
+            }
         } catch (IOException e) {
         } finally {
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/LoadSubmitter.java b/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/LoadSubmitter.java
index 6544c9a434..ab66660663 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/LoadSubmitter.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/LoadSubmitter.java
@@ -150,6 +150,7 @@ public class LoadSubmitter {
         }
     }
 
+    // CHECKSTYLE OFF: These name must match the name in json, case-sensitive.
     public static class SubmitResult {
         public String TxnId;
         public String Label;
@@ -169,4 +170,5 @@ public class LoadSubmitter {
         public String CommitAndPublishTimeMs;
         public String ErrorURL;
     }
+    // CHECKSTYLE ON
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/StatementSubmitter.java b/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/StatementSubmitter.java
index 16a8b086ff..aeb2b91ba4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/StatementSubmitter.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/httpv2/util/StatementSubmitter.java
@@ -124,7 +124,9 @@ public class StatementSubmitter {
                     LOG.warn("failed to close stmt", se2);
                 }
                 try {
-                    if (conn != null) conn.close();
+                    if (conn != null) {
+                        conn.close();
+                    }
                 } catch (SQLException se) {
                     LOG.warn("failed to close connection", se);
                 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/journal/bdbje/BDBEnvironment.java b/fe/fe-core/src/main/java/org/apache/doris/journal/bdbje/BDBEnvironment.java
index c4d7559c6a..773e20c5ff 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/journal/bdbje/BDBEnvironment.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/journal/bdbje/BDBEnvironment.java
@@ -19,8 +19,8 @@ package org.apache.doris.journal.bdbje;
 
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.common.Config;
-import org.apache.doris.ha.BDBHA;
 import org.apache.doris.ha.BDBStateChangeListener;
+import org.apache.doris.ha.BDBHA;
 import org.apache.doris.ha.HAProtocol;
 
 import com.sleepycat.je.Database;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/journal/bdbje/BDBJEJournal.java b/fe/fe-core/src/main/java/org/apache/doris/journal/bdbje/BDBJEJournal.java
index 686867eec6..4799fc22d6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/journal/bdbje/BDBJEJournal.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/journal/bdbje/BDBJEJournal.java
@@ -55,7 +55,7 @@ import java.util.concurrent.atomic.AtomicLong;
  * Finally, close this journal.
  * This class encapsulates the read, write APIs of bdbje
  */
-public class BDBJEJournal implements Journal {
+public class BDBJEJournal implements Journal { // CHECKSTYLE IGNORE THIS LINE: BDBJE should use uppercase
     public static final Logger LOG = LogManager.getLogger(BDBJEJournal.class);
     private static final int OUTPUT_BUFFER_INIT_SIZE = 128;
     private static final int RETRY_TIME = 3;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapPrivsChecker.java b/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapPrivsChecker.java
index 5b4fa42a35..62c4897adb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapPrivsChecker.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/ldap/LdapPrivsChecker.java
@@ -216,7 +216,9 @@ public class LdapPrivsChecker {
 
     public static Map<TablePattern, PrivBitSet> getLdapAllDbPrivs(UserIdentity userIdentity) {
         Map<TablePattern, PrivBitSet> ldapDbPrivs = Maps.newConcurrentMap();
-        if (!hasLdapPrivs(userIdentity)) return ldapDbPrivs;
+        if (!hasLdapPrivs(userIdentity)) {
+            return ldapDbPrivs;
+        }
         for (Map.Entry<TablePattern, PrivBitSet> entry : ConnectContext.get().getLdapGroupsPrivs()
                 .getTblPatternToPrivs().entrySet()) {
             if (entry.getKey().getPrivLevel().equals(PaloAuth.PrivLevel.DATABASE)) {
@@ -228,7 +230,9 @@ public class LdapPrivsChecker {
 
     public static Map<TablePattern, PrivBitSet> getLdapAllTblPrivs(UserIdentity userIdentity) {
         Map<TablePattern, PrivBitSet> ldapTblPrivs = Maps.newConcurrentMap();
-        if (!hasLdapPrivs(userIdentity)) return ldapTblPrivs;
+        if (!hasLdapPrivs(userIdentity)) {
+            return ldapTblPrivs;
+        }
         for (Map.Entry<TablePattern, PrivBitSet> entry : ConnectContext.get().getLdapGroupsPrivs()
                 .getTblPatternToPrivs().entrySet()) {
             if (entry.getKey().getPrivLevel().equals(PaloAuth.PrivLevel.TABLE)) {
@@ -240,7 +244,9 @@ public class LdapPrivsChecker {
 
     public static Map<ResourcePattern, PrivBitSet> getLdapAllResourcePrivs(UserIdentity userIdentity) {
         Map<ResourcePattern, PrivBitSet> ldapResourcePrivs = Maps.newConcurrentMap();
-        if (!hasLdapPrivs(userIdentity)) return ldapResourcePrivs;
+        if (!hasLdapPrivs(userIdentity)) {
+            return ldapResourcePrivs;
+        }
         for (Map.Entry<ResourcePattern, PrivBitSet> entry : ConnectContext.get().getLdapGroupsPrivs()
                 .getResourcePatternToPrivs().entrySet()) {
             if (entry.getKey().getPrivLevel().equals(PaloAuth.PrivLevel.RESOURCE)) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/LoadErrorHub.java b/fe/fe-core/src/main/java/org/apache/doris/load/LoadErrorHub.java
index 5d72e10b20..03f1c4a298 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/LoadErrorHub.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/LoadErrorHub.java
@@ -154,6 +154,7 @@ public abstract class LoadErrorHub {
                     break;
                 case BROKER_TYPE:
                     Preconditions.checkState(false, "hadoop load do not support broker error hub");
+                    break;
                 case NULL_TYPE:
                     break;
                 default:
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/StreamLoadRecordMgr.java b/fe/fe-core/src/main/java/org/apache/doris/load/StreamLoadRecordMgr.java
index a51b8cb077..138b5addeb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/StreamLoadRecordMgr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/StreamLoadRecordMgr.java
@@ -118,15 +118,15 @@ public class StreamLoadRecordMgr extends MasterDaemon {
         while (isQueueFull()) {
             StreamLoadItem record = streamLoadRecordHeap.poll();
             if (record != null) {
-                String de_label = record.getLabel();
-                long de_dbId = record.getDbId();
-
-                Map<String, StreamLoadRecord> labelToStreamLoadRecord = dbIdToLabelToStreamLoadRecord.get(de_dbId);
-                Iterator<Map.Entry<String, StreamLoadRecord>> iter_record = labelToStreamLoadRecord.entrySet().iterator();
-                while (iter_record.hasNext()) {
-                    String labelInMap = iter_record.next().getKey();
-                    if (labelInMap.equals(de_label)) {
-                        iter_record.remove();
+                String deLabel = record.getLabel();
+                long deDbId = record.getDbId();
+
+                Map<String, StreamLoadRecord> labelToStreamLoadRecord = dbIdToLabelToStreamLoadRecord.get(deDbId);
+                Iterator<Map.Entry<String, StreamLoadRecord>> iterRecord = labelToStreamLoadRecord.entrySet().iterator();
+                while (iterRecord.hasNext()) {
+                    String labelInMap = iterRecord.next().getKey();
+                    if (labelInMap.equals(deLabel)) {
+                        iterRecord.remove();
                         break;
                     }
                 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkRepository.java b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkRepository.java
index 6d44e5a3a9..7f5a578319 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkRepository.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkRepository.java
@@ -225,15 +225,15 @@ public class SparkRepository {
             // fileName should like:
             //      __lib_md5sum_spark-dpp-1.0.0-jar-with-dependencies.jar
             //      __lib_md5sum_spark-2x.zip
-            String[] lib_arg = unwrap(PREFIX_LIB, fileName).split(FILE_NAME_SEPARATOR);
-            if (lib_arg.length != 2) {
+            String[] libArg = unwrap(PREFIX_LIB, fileName).split(FILE_NAME_SEPARATOR);
+            if (libArg.length != 2) {
                 continue;
             }
-            String md5sum = lib_arg[0];
+            String md5sum = libArg[0];
             if (Strings.isNullOrEmpty(md5sum)) {
                 continue;
             }
-            String type = lib_arg[1];
+            String type = libArg[1];
             SparkLibrary.LibType libType = null;
             if (type.equals(SPARK_DPP)) {
                 libType = SparkLibrary.LibType.DPP;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadManager.java b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadManager.java
index fa54894f0f..cb07031765 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadManager.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadManager.java
@@ -554,10 +554,12 @@ public class RoutineLoadManager implements Writable {
         int i = 0;
         int j = routineLoadJobList.size() - 1;
         while (i < j) {
-            while (!routineLoadJobList.get(i).isFinal() && (i < j))
+            while (!routineLoadJobList.get(i).isFinal() && (i < j)) {
                 i++;
-            while (routineLoadJobList.get(j).isFinal() && (i < j))
+            }
+            while (routineLoadJobList.get(j).isFinal() && (i < j)) {
                 j--;
+            }
             if (i < j) {
                 RoutineLoadJob routineLoadJob = routineLoadJobList.get(i);
                 routineLoadJobList.set(i, routineLoadJobList.get(j));
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/sync/SyncChannelHandle.java b/fe/fe-core/src/main/java/org/apache/doris/load/sync/SyncChannelHandle.java
index 9fdb1048e9..ded37f2dde 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/sync/SyncChannelHandle.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/sync/SyncChannelHandle.java
@@ -25,7 +25,7 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 public class SyncChannelHandle implements SyncChannelCallback {
-    private Logger LOG = LogManager.getLogger(SyncChannelHandle.class);
+    private final static Logger LOG = LogManager.getLogger(SyncChannelHandle.class);
 
     // channel id -> dummy value(-1)
     private MarkedCountDownLatch<Long, Long> latch;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/sync/SyncJob.java b/fe/fe-core/src/main/java/org/apache/doris/load/sync/SyncJob.java
index a7401da9e2..399026a37c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/sync/SyncJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/sync/SyncJob.java
@@ -188,7 +188,7 @@ public abstract class SyncJob implements Writable {
     }
 
     private void checkStateTransform(JobState newState) throws UserException {
-        switch (jobState) {
+        switch (jobState) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case PENDING:
                 break;
             case RUNNING:
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/sync/canal/CanalDestination.java b/fe/fe-core/src/main/java/org/apache/doris/load/sync/canal/CanalDestination.java
index 8aebd0f085..f2ae06a8f0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/sync/canal/CanalDestination.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/sync/canal/CanalDestination.java
@@ -99,7 +99,9 @@ public class CanalDestination implements Writable {
 
     @Override
     public boolean equals(Object other) {
-        if (other == this) return true;
+        if (other == this) {
+            return true;
+        }
         if (!(other instanceof CanalDestination)) {
             return false;
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/sync/canal/CanalSyncJob.java b/fe/fe-core/src/main/java/org/apache/doris/load/sync/canal/CanalSyncJob.java
index ce4a8c9017..7749c9d6e7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/sync/canal/CanalSyncJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/sync/canal/CanalSyncJob.java
@@ -273,6 +273,8 @@ public class CanalSyncJob extends SyncJob {
                 case CANCELLED:
                     updateState(JobState.CANCELLED, true);
                     break;
+                default:
+                    throw new UserException("job state is invalid: " + jobState);
             }
         } catch (UserException e) {
             LOG.error(new LogBuilder(LogKey.SYNC_JOB, id)
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/update/UpdatePlanner.java b/fe/fe-core/src/main/java/org/apache/doris/load/update/UpdatePlanner.java
index 01b1823bfc..c8660c316d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/update/UpdatePlanner.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/update/UpdatePlanner.java
@@ -51,8 +51,8 @@ import java.util.Map;
 
 public class UpdatePlanner extends Planner {
 
-    private final IdGenerator<PlanNodeId> nodeIdGenerator_ = PlanNodeId.createGenerator();
-    private final IdGenerator<PlanFragmentId> fragmentIdGenerator_ =
+    private final IdGenerator<PlanNodeId> nodeIdGenerator = PlanNodeId.createGenerator();
+    private final IdGenerator<PlanFragmentId> fragmentIdGenerator =
             PlanFragmentId.createGenerator();
 
     private long targetDBId;
@@ -79,7 +79,7 @@ public class UpdatePlanner extends Planner {
 
     public void plan(long txnId) throws UserException {
         // 1. gen scan node
-        OlapScanNode olapScanNode = new OlapScanNode(nodeIdGenerator_.getNextId(), srcTupleDesc, "OlapScanNode");
+        OlapScanNode olapScanNode = new OlapScanNode(nodeIdGenerator.getNextId(), srcTupleDesc, "OlapScanNode");
         /* BEGIN: Temporary code, this part of the code needs to be refactored */
         olapScanNode.closePreAggregation("This an update operation");
         olapScanNode.useBaseIndexId();
@@ -97,7 +97,7 @@ public class UpdatePlanner extends Planner {
                 analyzer.getContext().getSessionVariable().sendBatchParallelism, false);
         olapTableSink.complete();
         // 3. gen plan fragment
-        PlanFragment planFragment = new PlanFragment(fragmentIdGenerator_.getNextId(), olapScanNode,
+        PlanFragment planFragment = new PlanFragment(fragmentIdGenerator.getNextId(), olapScanNode,
                 DataPartition.RANDOM);
         planFragment.setSink(olapTableSink);
         planFragment.setOutputExprs(computeOutputExprs());
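
The planner classes also drop the Impala-style trailing underscore from field names (nodeIdGenerator_ becomes nodeIdGenerator). Once the underscore is gone, a constructor or setter parameter can share the field's name, which is why several assignments gain an explicit "this.". A tiny sketch of the pattern with a placeholder field:

    public class UpdateTargetExample {
        // was: private long targetDbId_;
        private final long targetDbId;

        public UpdateTargetExample(long targetDbId) {
            // the parameter now shadows the field, so "this." disambiguates
            this.targetDbId = targetDbId;
        }

        public long getTargetDbId() {
            return targetDbId;
        }
    }
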
diff --git a/fe/fe-core/src/main/java/org/apache/doris/monitor/jvm/JvmPauseMonitor.java b/fe/fe-core/src/main/java/org/apache/doris/monitor/jvm/JvmPauseMonitor.java
index 0b6671aa7e..40153bb9de 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/monitor/jvm/JvmPauseMonitor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/monitor/jvm/JvmPauseMonitor.java
@@ -51,14 +51,14 @@ public class JvmPauseMonitor {
 
     // Check for Java deadlocks at this interval. Set by init(). 0 or negative means that
     // the deadlock checks are disabled.
-    private long deadlockCheckIntervalS_ = 0;
+    private long deadlockCheckIntervalS = 0;
 
     // log WARN if we detect a pause longer than this threshold.
-    private long warnThresholdMs_;
+    private long warnThresholdMs;
     private static final long WARN_THRESHOLD_MS = 10000;
 
     // log INFO if we detect a pause longer than this threshold.
-    private long infoThresholdMs_;
+    private long infoThresholdMs;
     private static final long INFO_THRESHOLD_MS = 1000;
 
     // Overall metrics
@@ -70,7 +70,7 @@ public class JvmPauseMonitor {
     private volatile long totalGcExtraSleepTime = 0;
 
     // Daemon thread running the pause monitor loop.
-    private Thread monitorThread_;
+    private Thread monitorThread;
     private volatile boolean shouldRun = true;
 
     // Singleton instance of this pause monitor.
@@ -78,7 +78,9 @@ public class JvmPauseMonitor {
 
     // Initializes the pause monitor. No-op if called multiple times.
     public static void initPauseMonitor(long deadlockCheckIntervalS) {
-        if (INSTANCE.isStarted()) return;
+        if (INSTANCE.isStarted()) {
+            return;
+        }
         INSTANCE.init(deadlockCheckIntervalS);
     }
 
@@ -87,19 +89,19 @@ public class JvmPauseMonitor {
     }
 
     private JvmPauseMonitor(long infoThresholdMs, long warnThresholdMs) {
-        this.infoThresholdMs_ = infoThresholdMs;
-        this.warnThresholdMs_ = warnThresholdMs;
+        this.infoThresholdMs = infoThresholdMs;
+        this.warnThresholdMs = warnThresholdMs;
     }
 
     protected void init(long deadlockCheckIntervalS) {
-        deadlockCheckIntervalS_ = deadlockCheckIntervalS;
-        monitorThread_ = new Thread(new Monitor(), "JVM pause monitor");
-        monitorThread_.setDaemon(true);
-        monitorThread_.start();
+        this.deadlockCheckIntervalS = deadlockCheckIntervalS;
+        monitorThread = new Thread(new Monitor(), "JVM pause monitor");
+        monitorThread.setDaemon(true);
+        monitorThread.start();
     }
 
     public boolean isStarted() {
-        return monitorThread_ != null;
+        return monitorThread != null;
     }
 
     public long getNumGcWarnThresholdExceeded() {
@@ -202,11 +204,11 @@ public class JvmPauseMonitor {
                 long extraSleepTime = sw.elapsed(TimeUnit.MILLISECONDS) - SLEEP_INTERVAL_MS;
                 Map<String, GcTimes> gcTimesAfterSleep = getGcTimes();
 
-                if (extraSleepTime > warnThresholdMs_) {
+                if (extraSleepTime > warnThresholdMs) {
                     ++numGcWarnThresholdExceeded;
                     LOG.warn(formatMessage(
                             extraSleepTime, gcTimesAfterSleep, gcTimesBeforeSleep));
-                } else if (extraSleepTime > infoThresholdMs_) {
+                } else if (extraSleepTime > infoThresholdMs) {
                     ++numGcInfoThresholdExceeded;
                     LOG.info(formatMessage(
                             extraSleepTime, gcTimesAfterSleep, gcTimesBeforeSleep));
@@ -214,8 +216,8 @@ public class JvmPauseMonitor {
                 totalGcExtraSleepTime += extraSleepTime;
                 gcTimesBeforeSleep = gcTimesAfterSleep;
 
-                if (deadlockCheckIntervalS_ > 0 &&
-                        timeSinceDeadlockCheck.elapsed(TimeUnit.SECONDS) >= deadlockCheckIntervalS_) {
+                if (deadlockCheckIntervalS > 0 &&
+                        timeSinceDeadlockCheck.elapsed(TimeUnit.SECONDS) >= deadlockCheckIntervalS) {
                     checkForDeadlocks();
                     timeSinceDeadlockCheck.reset().start();
                 }
@@ -242,7 +244,9 @@ public class JvmPauseMonitor {
                 for (ThreadInfo thread : deadlockedThreads) {
                     // Defensively check for null in case the thread somehow disappeared between
                     // findDeadlockedThreads() and getThreadInfo().
-                    if (thread != null) LOG.error(thread.toString());
+                    if (thread != null) {
+                        LOG.error(thread.toString());
+                    }
                 }
                 LOG.warn("All threads:");
                 for (ThreadInfo thread : threadMx.dumpAllThreads(true, true)) {
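
JvmPauseMonitor's loop is worth spelling out, since the renamed fields all feed into it: the daemon thread sleeps for a fixed interval, and any extra time that elapses beyond the requested sleep is treated as a stall (usually a long GC pause or an overloaded host), logged at INFO or WARN depending on the two thresholds above. A self-contained sketch of that detection idea using only the JDK; the real class additionally diffs garbage-collector bean counters and runs the deadlock check shown at the end of this hunk.

    import java.util.concurrent.TimeUnit;

    public class PauseDetector implements Runnable {
        private static final long SLEEP_INTERVAL_MS = 500;
        private static final long INFO_THRESHOLD_MS = 1000;
        private static final long WARN_THRESHOLD_MS = 10000;

        @Override
        public void run() {
            while (!Thread.currentThread().isInterrupted()) {
                long start = System.nanoTime();
                try {
                    Thread.sleep(SLEEP_INTERVAL_MS);
                } catch (InterruptedException e) {
                    return;
                }
                long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
                // time beyond the requested sleep is time the JVM (or host) was stalled
                long extraSleepMs = elapsedMs - SLEEP_INTERVAL_MS;
                if (extraSleepMs > WARN_THRESHOLD_MS) {
                    System.err.println("pause of ~" + extraSleepMs + " ms detected (possible long GC)");
                } else if (extraSleepMs > INFO_THRESHOLD_MS) {
                    System.out.println("pause of ~" + extraSleepMs + " ms detected");
                }
            }
        }
    }

A caller would start this on a daemon thread, much as init() does with monitorThread above.
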
diff --git a/fe/fe-core/src/main/java/org/apache/doris/monitor/jvm/JvmStats.java b/fe/fe-core/src/main/java/org/apache/doris/monitor/jvm/JvmStats.java
index bb5c33dcb8..97982c4c58 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/monitor/jvm/JvmStats.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/monitor/jvm/JvmStats.java
@@ -92,7 +92,9 @@ public class JvmStats {
         int threadsTerminated = 0;
         long threadIds[] = threadMXBean.getAllThreadIds();
         for (ThreadInfo threadInfo : threadMXBean.getThreadInfo(threadIds, 0)) {
-            if (threadInfo == null) continue; // race protection
+            if (threadInfo == null) {
+                continue; // race protection
+            }
             switch (threadInfo.getThreadState()) {
                 case NEW:           threadsNew++;           break;
                 case RUNNABLE:      threadsRunnable++;      break;
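
The null check added here guards a real race: a thread can exit between getAllThreadIds() and getThreadInfo(), in which case the returned array contains a null slot. A small self-contained sketch of the same counting pass over ThreadMXBean, grouping by Thread.State instead of keeping separate counters:

    import java.lang.management.ManagementFactory;
    import java.lang.management.ThreadInfo;
    import java.lang.management.ThreadMXBean;
    import java.util.EnumMap;
    import java.util.Map;

    public class ThreadStateCounts {
        public static Map<Thread.State, Integer> snapshot() {
            ThreadMXBean threadMxBean = ManagementFactory.getThreadMXBean();
            Map<Thread.State, Integer> counts = new EnumMap<>(Thread.State.class);
            for (ThreadInfo info : threadMxBean.getThreadInfo(threadMxBean.getAllThreadIds(), 0)) {
                if (info == null) {
                    continue; // the thread exited between the two MXBean calls
                }
                counts.merge(info.getThreadState(), 1, Integer::sum);
            }
            return counts;
        }

        public static void main(String[] args) {
            snapshot().forEach((state, n) -> System.out.println(state + ": " + n));
        }
    }
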
diff --git a/fe/fe-core/src/main/java/org/apache/doris/monitor/unit/ByteSizeUnit.java b/fe/fe-core/src/main/java/org/apache/doris/monitor/unit/ByteSizeUnit.java
index 504e3bc9c9..f4f487a37f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/monitor/unit/ByteSizeUnit.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/monitor/unit/ByteSizeUnit.java
@@ -60,7 +60,7 @@ public enum ByteSizeUnit {
     KB {
         @Override
         public long toBytes(long size) {
-            return x(size, C1 / C0, MAX / (C1 / C0));
+            return multiply(size, C1 / C0, MAX / (C1 / C0));
         }
 
         @Override
@@ -91,12 +91,12 @@ public enum ByteSizeUnit {
     MB {
         @Override
         public long toBytes(long size) {
-            return x(size, C2 / C0, MAX / (C2 / C0));
+            return multiply(size, C2 / C0, MAX / (C2 / C0));
         }
 
         @Override
         public long toKB(long size) {
-            return x(size, C2 / C1, MAX / (C2 / C1));
+            return multiply(size, C2 / C1, MAX / (C2 / C1));
         }
 
         @Override
@@ -122,17 +122,17 @@ public enum ByteSizeUnit {
     GB {
         @Override
         public long toBytes(long size) {
-            return x(size, C3 / C0, MAX / (C3 / C0));
+            return multiply(size, C3 / C0, MAX / (C3 / C0));
         }
 
         @Override
         public long toKB(long size) {
-            return x(size, C3 / C1, MAX / (C3 / C1));
+            return multiply(size, C3 / C1, MAX / (C3 / C1));
         }
 
         @Override
         public long toMB(long size) {
-            return x(size, C3 / C2, MAX / (C3 / C2));
+            return multiply(size, C3 / C2, MAX / (C3 / C2));
         }
 
         @Override
@@ -153,22 +153,22 @@ public enum ByteSizeUnit {
     TB {
         @Override
         public long toBytes(long size) {
-            return x(size, C4 / C0, MAX / (C4 / C0));
+            return multiply(size, C4 / C0, MAX / (C4 / C0));
         }
 
         @Override
         public long toKB(long size) {
-            return x(size, C4 / C1, MAX / (C4 / C1));
+            return multiply(size, C4 / C1, MAX / (C4 / C1));
         }
 
         @Override
         public long toMB(long size) {
-            return x(size, C4 / C2, MAX / (C4 / C2));
+            return multiply(size, C4 / C2, MAX / (C4 / C2));
         }
 
         @Override
         public long toGB(long size) {
-            return x(size, C4 / C3, MAX / (C4 / C3));
+            return multiply(size, C4 / C3, MAX / (C4 / C3));
         }
 
         @Override
@@ -184,27 +184,27 @@ public enum ByteSizeUnit {
     PB {
         @Override
         public long toBytes(long size) {
-            return x(size, C5 / C0, MAX / (C5 / C0));
+            return multiply(size, C5 / C0, MAX / (C5 / C0));
         }
 
         @Override
         public long toKB(long size) {
-            return x(size, C5 / C1, MAX / (C5 / C1));
+            return multiply(size, C5 / C1, MAX / (C5 / C1));
         }
 
         @Override
         public long toMB(long size) {
-            return x(size, C5 / C2, MAX / (C5 / C2));
+            return multiply(size, C5 / C2, MAX / (C5 / C2));
         }
 
         @Override
         public long toGB(long size) {
-            return x(size, C5 / C3, MAX / (C5 / C3));
+            return multiply(size, C5 / C3, MAX / (C5 / C3));
         }
 
         @Override
         public long toTB(long size) {
-            return x(size, C5 / C4, MAX / (C5 / C4));
+            return multiply(size, C5 / C4, MAX / (C5 / C4));
         }
 
         @Override
@@ -233,9 +233,13 @@ public enum ByteSizeUnit {
      * Scale d by m, checking for overflow.
      * This has a short name to make above code more readable.
      */
-    static long x(long d, long m, long over) {
-        if (d > over) return Long.MAX_VALUE;
-        if (d < -over) return Long.MIN_VALUE;
+    static long multiply(long d, long m, long over) {
+        if (d > over) {
+            return Long.MAX_VALUE;
+        }
+        if (d < -over) {
+            return Long.MIN_VALUE;
+        }
         return d * m;
     }
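
The renamed helper saturates rather than overflowing: callers pass over = MAX / m, so the bound can be checked before the multiplication happens. A standalone sketch showing the clamping behaviour; on Java 8+ Math.multiplyExact(long, long) is the throwing alternative when saturation is not wanted.

    public final class SaturatingMultiply {
        private SaturatingMultiply() {
        }

        /** Scales d by m, clamping to Long.MIN_VALUE / Long.MAX_VALUE on overflow.
         *  over is the largest |d| that can be scaled by m without overflow,
         *  precomputed by the caller (MAX / m in the unit conversions above). */
        public static long multiply(long d, long m, long over) {
            if (d > over) {
                return Long.MAX_VALUE;
            }
            if (d < -over) {
                return Long.MIN_VALUE;
            }
            return d * m;
        }

        public static void main(String[] args) {
            long bytesPerGb = 1024L * 1024L * 1024L;
            long over = Long.MAX_VALUE / bytesPerGb;
            System.out.println(multiply(5L, bytesPerGb, over));             // 5368709120
            System.out.println(multiply(Long.MAX_VALUE, bytesPerGb, over)); // clamped to Long.MAX_VALUE
        }
    }
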
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/monitor/unit/TimeValue.java b/fe/fe-core/src/main/java/org/apache/doris/monitor/unit/TimeValue.java
index 8f15c01968..8f135bff10 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/monitor/unit/TimeValue.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/monitor/unit/TimeValue.java
@@ -341,8 +341,12 @@ public class TimeValue implements Comparable<TimeValue> {
 
     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
 
         return this.compareTo(((TimeValue) o)) == 0;
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/AcceptListener.java b/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/AcceptListener.java
index 847cf5eba9..10a4d36f19 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/AcceptListener.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/AcceptListener.java
@@ -36,8 +36,8 @@ import java.io.IOException;
  * listener for accept mysql connections.
  */
 public class AcceptListener implements ChannelListener<AcceptingChannel<StreamConnection>> {
-    private final Logger LOG = LogManager.getLogger(this.getClass());
-    private ConnectScheduler connectScheduler;
+    private final static Logger LOG = LogManager.getLogger(AcceptListener.class);
+    private final ConnectScheduler connectScheduler;
 
     public AcceptListener(ConnectScheduler connectScheduler) {
         this.connectScheduler = connectScheduler;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/NMysqlChannel.java b/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/NMysqlChannel.java
index 44c9c51131..2cfc5a9e51 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/NMysqlChannel.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/NMysqlChannel.java
@@ -33,7 +33,7 @@ import java.nio.ByteBuffer;
  * mysql Channel based on nio.
  */
 public class NMysqlChannel extends MysqlChannel {
-    protected final Logger LOG = LogManager.getLogger(this.getClass());
+    protected final static Logger LOG = LogManager.getLogger(NMysqlChannel.class);
     private StreamConnection conn;
 
     public NMysqlChannel(StreamConnection connection) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/NMysqlServer.java b/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/NMysqlServer.java
index a33223edc1..51e6d975f7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/NMysqlServer.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/NMysqlServer.java
@@ -39,7 +39,7 @@ import java.util.concurrent.ExecutorService;
  * mysql protocol implementation based on nio.
  */
 public class NMysqlServer extends MysqlServer {
-    private final Logger LOG = LogManager.getLogger(this.getClass());
+    private final static Logger LOG = LogManager.getLogger(NMysqlServer.class);
 
     private XnioWorker xnioWorker;
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/ReadListener.java b/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/ReadListener.java
index f9123cf5df..82df15160d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/ReadListener.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/mysql/nio/ReadListener.java
@@ -30,7 +30,7 @@ import org.xnio.conduits.ConduitStreamSourceChannel;
  * listener for handle mysql cmd.
  */
 public class ReadListener implements ChannelListener<ConduitStreamSourceChannel> {
-    private final Logger LOG = LogManager.getLogger(this.getClass());
+    private static final Logger LOG = LogManager.getLogger(ReadListener.class);
     private NConnectContext ctx;
     private ConnectProcessor connectProcessor;
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/mysql/privilege/PaloAuth.java b/fe/fe-core/src/main/java/org/apache/doris/mysql/privilege/PaloAuth.java
index 852ba178bf..3657e03c9f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/mysql/privilege/PaloAuth.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/mysql/privilege/PaloAuth.java
@@ -985,7 +985,7 @@ public class PaloAuth implements Writable {
                             boolean errOnNonExist) throws DdlException {
         writeLock();
         try {
-            switch (resourcePattern.getPrivLevel()) {
+            switch (resourcePattern.getPrivLevel()) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
                 case GLOBAL:
                     revokeGlobalPrivs(userIdent, privs, errOnNonExist);
                     break;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
index 8dfc6bec91..779ca0ef93 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
@@ -67,7 +67,6 @@ import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
 import org.apache.doris.nereids.trees.plans.logical.LogicalProject;
 
 import com.google.common.collect.Lists;
-
 import org.antlr.v4.runtime.ParserRuleContext;
 import org.antlr.v4.runtime.RuleContext;
 import org.antlr.v4.runtime.tree.ParseTree;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryExpression.java
index 988985c196..9cd3d9abc4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryExpression.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryExpression.java
@@ -18,7 +18,6 @@
 package org.apache.doris.nereids.trees.expressions;
 
 import org.apache.doris.nereids.trees.BinaryNode;
-import org.apache.doris.nereids.trees.NodeType;
 
 /**
  * Interface for all expression that have two children.
diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/AlterViewInfo.java b/fe/fe-core/src/main/java/org/apache/doris/persist/AlterViewInfo.java
index 2037f50c48..3f14c65add 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/AlterViewInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/AlterViewInfo.java
@@ -83,7 +83,9 @@ public class AlterViewInfo implements Writable {
 
     @Override
     public boolean equals(Object other) {
-        if (other == this) return true;
+        if (other == this) {
+            return true;
+        }
         if (!(other instanceof AlterViewInfo)) {
             return false;
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/ColocatePersistInfo.java b/fe/fe-core/src/main/java/org/apache/doris/persist/ColocatePersistInfo.java
index eb49e63e2c..8016a38cc3 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/ColocatePersistInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/ColocatePersistInfo.java
@@ -33,6 +33,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  * PersistInfo for ColocateTableIndex
@@ -117,6 +118,11 @@ public class ColocatePersistInfo implements Writable {
         }
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(groupId, tableId, backendsPerBucketSeq);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (obj == this) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/CreateTableInfo.java b/fe/fe-core/src/main/java/org/apache/doris/persist/CreateTableInfo.java
index 843dfc4b97..a59a68e5f5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/CreateTableInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/CreateTableInfo.java
@@ -27,6 +27,7 @@ import org.slf4j.LoggerFactory;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Objects;
 
 public class CreateTableInfo implements Writable {
     public static final Logger LOG = LoggerFactory.getLogger(CreateTableInfo.class);
@@ -66,6 +67,11 @@ public class CreateTableInfo implements Writable {
         return createTableInfo;
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(dbName, table);
+    }
+
     public boolean equals(Object obj) {
         if (this == obj) {
             return true;
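
ColocatePersistInfo and CreateTableInfo previously overrode equals() without hashCode(), which breaks the Object contract: two instances that compare equal could land in different buckets of a HashMap or HashSet. The added Objects.hash overrides close that gap. A minimal sketch of the paired overrides, with placeholder fields:

    import java.util.Objects;

    public class TableKey {
        private final String dbName;
        private final long tableId;

        public TableKey(String dbName, long tableId) {
            this.dbName = dbName;
            this.tableId = tableId;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof TableKey)) {
                return false;
            }
            TableKey other = (TableKey) obj;
            return tableId == other.tableId && Objects.equals(dbName, other.dbName);
        }

        @Override
        public int hashCode() {
            // must use the same fields as equals() so equal objects share a hash code
            return Objects.hash(dbName, tableId);
        }
    }
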
diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java b/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java
index 31c06fc5ea..fbdf117921 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java
@@ -218,7 +218,7 @@ public class OperationType {
     public static final short OP_CREATE_SQL_BLOCK_RULE = 300;
     public static final short OP_ALTER_SQL_BLOCK_RULE = 301;
     public static final short OP_DROP_SQL_BLOCK_RULE = 302;
-    
+
     // policy 310-320
     public static final short OP_CREATE_POLICY = 310;
     public static final short OP_DROP_POLICY = 311;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/AggregationNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/AggregationNode.java
index 9a24d72303..d49332d5f9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/AggregationNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/AggregationNode.java
@@ -96,9 +96,9 @@ public class AggregationNode extends PlanNode {
      * Sets this node as a preaggregation. Only valid to call this if it is not marked
      * as a preaggregation
      */
-    public void setIsPreagg(PlannerContext ctx_) {
-        useStreamingPreagg =  ctx_.getQueryOptions().isSetDisableStreamPreaggregations()
-                && !ctx_.getQueryOptions().disable_stream_preaggregations
+    public void setIsPreagg(PlannerContext ctx) {
+        useStreamingPreagg =  ctx.getQueryOptions().isSetDisableStreamPreaggregations()
+                && !ctx.getQueryOptions().disable_stream_preaggregations
                 && aggInfo.getGroupingExprs().size() > 0;
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/AnalyticPlanner.java b/fe/fe-core/src/main/java/org/apache/doris/planner/AnalyticPlanner.java
index 57925d25c4..f276c50e73 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/AnalyticPlanner.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/AnalyticPlanner.java
@@ -76,12 +76,12 @@ public class AnalyticPlanner {
     private final AnalyticInfo analyticInfo;
     private final Analyzer analyzer;
 
-    private final PlannerContext ctx_;
+    private final PlannerContext ctx;
 
     public AnalyticPlanner(AnalyticInfo analyticInfo, Analyzer analyzer, PlannerContext ctx) {
         this.analyticInfo = analyticInfo;
         this.analyzer = analyzer;
-        this.ctx_ = ctx;
+        this.ctx = ctx;
     }
 
     /**
@@ -342,7 +342,9 @@ public class AnalyticPlanner {
             for (int i = 0; i < inputSmap.size(); ++i) {
                 Expr rhsExpr = inputSmap.getRhs().get(i);
                 // Ignore substitutions that are irrelevant at this plan node and its ancestors.
-                if (!rhsExpr.isBoundByTupleIds(input.getTupleIds())) continue;
+                if (!rhsExpr.isBoundByTupleIds(input.getTupleIds())) {
+                    continue;
+                }
                 rhsExpr.collect(TupleIsNullPredicate.class, tupleIsNullPredsToMaterialize);
             }
             Expr.removeDuplicates(tupleIsNullPredsToMaterialize);
@@ -414,7 +416,7 @@ public class AnalyticPlanner {
             }
 
             SortInfo sortInfo = createSortInfo(newRoot, sortExprs, isAsc, nullsFirst);
-            SortNode sortNode = new SortNode(ctx_.getNextNodeId(), newRoot, sortInfo, false, false, 0);
+            SortNode sortNode = new SortNode(ctx.getNextNodeId(), newRoot, sortInfo, false, false, 0);
 
             // if this sort group does not have partitioning exprs, we want the sort
             // to be executed like a regular distributed sort
@@ -482,7 +484,7 @@ public class AnalyticPlanner {
                 LOG.trace("orderByEq: " + orderByEq.debugString());
             }
 
-            AnalyticEvalNode node = new AnalyticEvalNode(ctx_.getNextNodeId(), newRoot,
+            AnalyticEvalNode node = new AnalyticEvalNode(ctx.getNextNodeId(), newRoot,
                     windowGroup.analyticFnCalls, windowGroup.partitionByExprs,
                     windowGroup.orderByElements,
                     windowGroup.window,
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/BrokerScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/BrokerScanNode.java
index 59fada152e..f6f137f77f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/BrokerScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/BrokerScanNode.java
@@ -49,6 +49,7 @@ import org.apache.doris.thrift.TBrokerScanRange;
 import org.apache.doris.thrift.TBrokerScanRangeParams;
 import org.apache.doris.thrift.TExplainLevel;
 import org.apache.doris.thrift.TFileFormatType;
+import org.apache.doris.thrift.TFileType;
 import org.apache.doris.thrift.THdfsParams;
 import org.apache.doris.thrift.TNetworkAddress;
 import org.apache.doris.thrift.TScanRange;
@@ -452,11 +453,11 @@ public class BrokerScanNode extends LoadScanNode {
         return "";
     }
 
-    private String getHeaderType(String format_type) {
-        if (format_type != null) {
-            if (format_type.toLowerCase().equals(FeConstants.csv_with_names)
-                    || format_type.toLowerCase().equals(FeConstants.csv_with_names_and_types)) {
-                return format_type;
+    private String getHeaderType(String formatType) {
+        if (formatType != null) {
+            if (formatType.toLowerCase().equals(FeConstants.csv_with_names)
+                    || formatType.toLowerCase().equals(FeConstants.csv_with_names_and_types)) {
+                return formatType;
             }
         }
         return "";
@@ -483,7 +484,7 @@ public class BrokerScanNode extends LoadScanNode {
             long leftBytes = fileStatus.size - curFileOffset;
             long tmpBytes = curInstanceBytes + leftBytes;
             //header_type
-            String header_type = getHeaderType(context.fileGroup.getFileFormat());
+            String headerType = getHeaderType(context.fileGroup.getFileFormat());
             TFileFormatType formatType = formatType(context.fileGroup.getFileFormat(), fileStatus.path);
             List<String> columnsFromPath = BrokerUtil.parseColumnsFromPath(fileStatus.path,
                     context.fileGroup.getColumnsFromPath());
@@ -494,7 +495,7 @@ public class BrokerScanNode extends LoadScanNode {
                         || formatType == TFileFormatType.FORMAT_JSON) {
                     long rangeBytes = bytesPerInstance - curInstanceBytes;
                     TBrokerRangeDesc rangeDesc = createBrokerRangeDesc(curFileOffset, fileStatus, formatType,
-                            rangeBytes, columnsFromPath, numberOfColumnsFromFile, brokerDesc, header_type);
+                            rangeBytes, columnsFromPath, numberOfColumnsFromFile, brokerDesc, headerType);
                     if (formatType == TFileFormatType.FORMAT_JSON) {
                         rangeDesc.setStripOuterArray(context.fileGroup.isStripOuterArray());
                         rangeDesc.setJsonpaths(context.fileGroup.getJsonPaths());
@@ -508,7 +509,7 @@ public class BrokerScanNode extends LoadScanNode {
 
                 } else {
                     TBrokerRangeDesc rangeDesc = createBrokerRangeDesc(curFileOffset, fileStatus, formatType,
-                            leftBytes, columnsFromPath, numberOfColumnsFromFile, brokerDesc, header_type);
+                            leftBytes, columnsFromPath, numberOfColumnsFromFile, brokerDesc, headerType);
                     if (rangeDesc.hdfs_params != null && rangeDesc.hdfs_params.getFsName() == null) {
                         rangeDesc.hdfs_params.setFsName(fsName);
                     } else if (rangeDesc.hdfs_params == null) {
@@ -528,7 +529,7 @@ public class BrokerScanNode extends LoadScanNode {
 
             } else {
                 TBrokerRangeDesc rangeDesc = createBrokerRangeDesc(curFileOffset, fileStatus, formatType,
-                        leftBytes, columnsFromPath, numberOfColumnsFromFile, brokerDesc, header_type);
+                        leftBytes, columnsFromPath, numberOfColumnsFromFile, brokerDesc, headerType);
                 if (formatType == TFileFormatType.FORMAT_JSON) {
                     rangeDesc.setStripOuterArray(context.fileGroup.isStripOuterArray());
                     rangeDesc.setJsonpaths(context.fileGroup.getJsonPaths());
@@ -573,10 +574,8 @@ public class BrokerScanNode extends LoadScanNode {
         rangeDesc.setColumnsFromPath(columnsFromPath);
         rangeDesc.setHeaderType(headerType);
         // set hdfs params for hdfs file type.
-        switch (brokerDesc.getFileType()) {
-            case FILE_HDFS:
-                BrokerUtil.generateHdfsParam(brokerDesc.getProperties(), rangeDesc);
-                break;
+        if (brokerDesc.getFileType() == TFileType.FILE_HDFS) {
+            BrokerUtil.generateHdfsParam(brokerDesc.getProperties(), rangeDesc);
         }
         return rangeDesc;
     }
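
Here a switch over TFileType with a single case and no default is rewritten as a plain if, which reads better and satisfies the same checkstyle rule, with the newly imported TFileType making the comparison explicit. A tiny sketch of the shape of that rewrite, using a placeholder enum and a Runnable standing in for the BrokerUtil call:

    public class SingleCaseSwitchExample {
        enum FileType { FILE_LOCAL, FILE_BROKER, FILE_HDFS }

        // A switch with one case and no default trips the checkstyle rule;
        // a single equality test says the same thing.
        static void applyHdfsParams(FileType fileType, Runnable generateHdfsParam) {
            if (fileType == FileType.FILE_HDFS) {
                generateHdfsParam.run();
            }
        }
    }
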
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/ColumnBound.java b/fe/fe-core/src/main/java/org/apache/doris/planner/ColumnBound.java
index 805d4ddaa3..05cc040f73 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ColumnBound.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ColumnBound.java
@@ -53,8 +53,12 @@ public class ColumnBound implements Comparable<ColumnBound> {
 
     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
         ColumnBound that = (ColumnBound) o;
         return Objects.equal(value, that.value);
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/CrossJoinNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/CrossJoinNode.java
index 7cb411da8d..0df0fa2803 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/CrossJoinNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/CrossJoinNode.java
@@ -38,11 +38,11 @@ public class CrossJoinNode extends PlanNode {
     // Default per-host memory requirement used if no valid stats are available.
     // TODO: Come up with a more useful heuristic (e.g., based on scanned partitions).
     private final static long DEFAULT_PER_HOST_MEM = 2L * 1024L * 1024L * 1024L;
-    private final TableRef innerRef_;
+    private final TableRef innerRef;
 
     public CrossJoinNode(PlanNodeId id, PlanNode outer, PlanNode inner, TableRef innerRef) {
         super(id, "CROSS JOIN");
-        innerRef_ = innerRef;
+        this.innerRef = innerRef;
         tupleIds.addAll(outer.getTupleIds());
         tupleIds.addAll(inner.getTupleIds());
         tblRefIds.addAll(outer.getTblRefIds());
@@ -57,7 +57,7 @@ public class CrossJoinNode extends PlanNode {
     }
 
     public TableRef getInnerRef() {
-        return innerRef_;
+        return innerRef;
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/DataSink.java b/fe/fe-core/src/main/java/org/apache/doris/planner/DataSink.java
index 2de32c35fa..03f24a5324 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/DataSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/DataSink.java
@@ -35,7 +35,7 @@ import org.apache.doris.thrift.TExplainLevel;
  */
 public abstract class DataSink {
     // Fragment that this DataSink belongs to. Set by the PlanFragment enclosing this sink.
-    protected PlanFragment fragment_;
+    protected PlanFragment fragment;
 
     /**
      * Return an explain string for the DataSink. Each line of the explain will be prefixed
@@ -48,8 +48,8 @@ public abstract class DataSink {
 
     protected abstract TDataSink toThrift();
 
-    public void setFragment(PlanFragment fragment) { fragment_ = fragment; }
-    public PlanFragment getFragment() { return fragment_; }
+    public void setFragment(PlanFragment fragment) { this.fragment = fragment; }
+    public PlanFragment getFragment() { return fragment; }
 
     public abstract PlanNodeId getExchNodeId();
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/DistributedPlanner.java b/fe/fe-core/src/main/java/org/apache/doris/planner/DistributedPlanner.java
index 293d00436f..2b4485f1a1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/DistributedPlanner.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/DistributedPlanner.java
@@ -61,10 +61,10 @@ import java.util.stream.Collectors;
 public class DistributedPlanner {
     private final static Logger LOG = LogManager.getLogger(DistributedPlanner.class);
 
-    private final PlannerContext ctx_;
+    private final PlannerContext ctx;
 
     public DistributedPlanner(PlannerContext ctx) {
-        ctx_ = ctx;
+        this.ctx = ctx;
     }
 
     /**
@@ -80,9 +80,9 @@ public class DistributedPlanner {
      */
     public ArrayList<PlanFragment> createPlanFragments(
             PlanNode singleNodePlan) throws UserException, AnalysisException {
-        Preconditions.checkState(!ctx_.isSingleNodeExec());
+        Preconditions.checkState(!ctx.isSingleNodeExec());
         // AnalysisContext.AnalysisResult analysisResult = ctx_.getAnalysisResult();
-        QueryStmt queryStmt = ctx_.getQueryStmt();
+        QueryStmt queryStmt = ctx.getQueryStmt();
         ArrayList<PlanFragment> fragments = Lists.newArrayList();
         // For inserts or CTAS, unless there is a limit, leave the root fragment
         // partitioned, otherwise merge everything into a single coordinator fragment,
@@ -94,7 +94,7 @@ public class DistributedPlanner {
         //     Preconditions.checkState(!queryStmt.hasOffset());
         //     isPartitioned = true;
         // }
-        if (ctx_.isInsert() && !singleNodePlan.hasLimit()) {
+        if (ctx.isInsert() && !singleNodePlan.hasLimit()) {
             Preconditions.checkState(!queryStmt.hasOffset());
             isPartitioned = true;
         }
@@ -158,13 +158,13 @@ public class DistributedPlanner {
         // Following is repartition logic
         Preconditions.checkState(needRepartition);
 
-        ExchangeNode exchNode = new ExchangeNode(ctx_.getNextNodeId(), inputFragment.getPlanRoot(), false);
+        ExchangeNode exchNode = new ExchangeNode(ctx.getNextNodeId(), inputFragment.getPlanRoot(), false);
         exchNode.setNumInstances(inputFragment.getPlanRoot().getNumInstances());
         // exchNode.computeStats(analyzer);
         // exchNode.createDefaultSmap(analyzer);
-        exchNode.init(ctx_.getRootAnalyzer());
+        exchNode.init(ctx.getRootAnalyzer());
         DataPartition dataPartition = stmt.getDataPartition();
-        PlanFragment fragment = new PlanFragment(ctx_.getNextFragmentId(), exchNode, dataPartition);
+        PlanFragment fragment = new PlanFragment(ctx.getNextFragmentId(), exchNode, dataPartition);
         inputFragment.setDestination(exchNode);
         inputFragment.setOutputPartition(dataPartition);
         fragments.add(fragment);
@@ -224,7 +224,7 @@ public class DistributedPlanner {
         } else if (root instanceof AnalyticEvalNode) {
             result = createAnalyticFragment(root, childFragments.get(0), fragments);
         } else if (root instanceof EmptySetNode) {
-            result = new PlanFragment(ctx_.getNextFragmentId(), root, DataPartition.UNPARTITIONED);
+            result = new PlanFragment(ctx.getNextFragmentId(), root, DataPartition.UNPARTITIONED);
         } else if (root instanceof RepeatNode) {
             result = createRepeatNodeFragment((RepeatNode) root, childFragments.get(0), fragments);
         } else if (root instanceof AssertNumRowsNode) {
@@ -256,11 +256,11 @@ public class DistributedPlanner {
 
         // exchange node clones the behavior of its input, aside from the conjuncts
         ExchangeNode mergePlan =
-                new ExchangeNode(ctx_.getNextNodeId(), inputFragment.getPlanRoot(), false);
+                new ExchangeNode(ctx.getNextNodeId(), inputFragment.getPlanRoot(), false);
         mergePlan.setNumInstances(inputFragment.getPlanRoot().getNumInstances());
-        mergePlan.init(ctx_.getRootAnalyzer());
+        mergePlan.init(ctx.getRootAnalyzer());
         Preconditions.checkState(mergePlan.hasValidStats());
-        PlanFragment fragment = new PlanFragment(ctx_.getNextFragmentId(), mergePlan, DataPartition.UNPARTITIONED);
+        PlanFragment fragment = new PlanFragment(ctx.getNextFragmentId(), mergePlan, DataPartition.UNPARTITIONED);
         inputFragment.setDestination(mergePlan);
         return fragment;
     }
@@ -273,17 +273,17 @@ public class DistributedPlanner {
      */
     private PlanFragment createScanFragment(PlanNode node) throws UserException {
         if (node instanceof MysqlScanNode || node instanceof OdbcScanNode) {
-            return new PlanFragment(ctx_.getNextFragmentId(), node, DataPartition.UNPARTITIONED);
+            return new PlanFragment(ctx.getNextFragmentId(), node, DataPartition.UNPARTITIONED);
         } else if (node instanceof SchemaScanNode) {
-            return new PlanFragment(ctx_.getNextFragmentId(), node, DataPartition.UNPARTITIONED);
+            return new PlanFragment(ctx.getNextFragmentId(), node, DataPartition.UNPARTITIONED);
         } else if (node instanceof OlapScanNode) {
             // olap scan node
             OlapScanNode olapScanNode = (OlapScanNode) node;
-            return new PlanFragment(ctx_.getNextFragmentId(), node,
+            return new PlanFragment(ctx.getNextFragmentId(), node,
                     olapScanNode.constructInputPartitionByDistributionInfo(), DataPartition.RANDOM);
         } else {
             // other scan nodes are random partitioned: es, broker
-            return new PlanFragment(ctx_.getNextFragmentId(), node, DataPartition.RANDOM);
+            return new PlanFragment(ctx.getNextFragmentId(), node, DataPartition.RANDOM);
         }
     }
 
@@ -328,9 +328,9 @@ public class DistributedPlanner {
             DataPartition rhsJoinPartition =
                     new DataPartition(TPartitionType.BUCKET_SHFFULE_HASH_PARTITIONED, rhsPartitionxprs);
             ExchangeNode rhsExchange =
-                    new ExchangeNode(ctx_.getNextNodeId(), rightChildFragment.getPlanRoot(), false);
+                    new ExchangeNode(ctx.getNextNodeId(), rightChildFragment.getPlanRoot(), false);
             rhsExchange.setNumInstances(rightChildFragment.getPlanRoot().getNumInstances());
-            rhsExchange.init(ctx_.getRootAnalyzer());
+            rhsExchange.init(ctx.getRootAnalyzer());
 
             node.setChild(0, leftChildFragment.getPlanRoot());
             node.setChild(1, rhsExchange);
@@ -358,7 +358,7 @@ public class DistributedPlanner {
                 doBroadcast = true;
             } else if (!node.getInnerRef().isPartitionJoin() && joinCostEvaluation.isBroadcastCostSmaller()
                     && joinCostEvaluation.constructHashTableSpace()
-                    <= ctx_.getRootAnalyzer().getAutoBroadcastJoinThreshold()) {
+                    <= ctx.getRootAnalyzer().getAutoBroadcastJoinThreshold()) {
                 doBroadcast = true;
             } else {
                 doBroadcast = false;
@@ -398,20 +398,20 @@ public class DistributedPlanner {
             DataPartition lhsJoinPartition = new DataPartition(TPartitionType.HASH_PARTITIONED,
                     Expr.cloneList(lhsJoinExprs, null));
             ExchangeNode lhsExchange =
-                    new ExchangeNode(ctx_.getNextNodeId(), leftChildFragment.getPlanRoot(), false);
+                    new ExchangeNode(ctx.getNextNodeId(), leftChildFragment.getPlanRoot(), false);
             lhsExchange.setNumInstances(leftChildFragment.getPlanRoot().getNumInstances());
-            lhsExchange.init(ctx_.getRootAnalyzer());
+            lhsExchange.init(ctx.getRootAnalyzer());
 
             DataPartition rhsJoinPartition =
                     new DataPartition(TPartitionType.HASH_PARTITIONED, rhsJoinExprs);
             ExchangeNode rhsExchange =
-                    new ExchangeNode(ctx_.getNextNodeId(), rightChildFragment.getPlanRoot(), false);
+                    new ExchangeNode(ctx.getNextNodeId(), rightChildFragment.getPlanRoot(), false);
             rhsExchange.setNumInstances(rightChildFragment.getPlanRoot().getNumInstances());
-            rhsExchange.init(ctx_.getRootAnalyzer());
+            rhsExchange.init(ctx.getRootAnalyzer());
 
             node.setChild(0, lhsExchange);
             node.setChild(1, rhsExchange);
-            PlanFragment joinFragment = new PlanFragment(ctx_.getNextFragmentId(), node, lhsJoinPartition);
+            PlanFragment joinFragment = new PlanFragment(ctx.getNextFragmentId(), node, lhsJoinPartition);
             // connect the child fragments
             leftChildFragment.setDestination(lhsExchange);
             leftChildFragment.setOutputPartition(lhsJoinPartition);
@@ -616,8 +616,9 @@ public class DistributedPlanner {
         if (leftScanNode.getSelectedPartitionIds().size() != 1) {
             ColocateTableIndex colocateIndex = Catalog.getCurrentColocateIndex();
             if (!leftTable.isColocateTable() ||
-                    colocateIndex.isGroupUnstable(colocateIndex.getGroup(leftTable.getId())))
+                    colocateIndex.isGroupUnstable(colocateIndex.getGroup(leftTable.getId()))) {
                 return false;
+            }
         }
 
         DistributionInfo leftDistribution = leftScanNode.getOlapTable().getDefaultDistributionInfo();
@@ -664,7 +665,9 @@ public class DistributedPlanner {
                     }
                 }
 
-                if (!findRhsExprs) return false;
+                if (!findRhsExprs) {
+                    return false;
+                }
             }
         } else {
             return false;
@@ -710,13 +713,15 @@ public class DistributedPlanner {
         // were dropped because of constant predicates that evaluated to false.
         if (setOperationNode.getChildren().isEmpty()) {
             return new PlanFragment(
-                    ctx_.getNextFragmentId(), setOperationNode, DataPartition.UNPARTITIONED);
+                    ctx.getNextFragmentId(), setOperationNode, DataPartition.UNPARTITIONED);
         }
 
         Preconditions.checkState(!childFragments.isEmpty());
         int numUnpartitionedChildFragments = 0;
         for (int i = 0; i < childFragments.size(); ++i) {
-            if (!childFragments.get(i).isPartitioned()) ++numUnpartitionedChildFragments;
+            if (!childFragments.get(i).isPartitioned()) {
+                ++numUnpartitionedChildFragments;
+            }
         }
 
         // remove all children to avoid them being tagged with the wrong
@@ -727,7 +732,7 @@ public class DistributedPlanner {
         // with a UnionNode that merges all child fragments.
         if (numUnpartitionedChildFragments == childFragments.size()) {
             PlanFragment setOperationFragment = new PlanFragment(
-                    ctx_.getNextFragmentId(), setOperationNode, DataPartition.UNPARTITIONED);
+                    ctx.getNextFragmentId(), setOperationNode, DataPartition.UNPARTITIONED);
             // Absorb the plan trees of all childFragments into unionNode
             // and fix up the fragment tree in the process.
             for (int i = 0; i < childFragments.size(); ++i) {
@@ -735,7 +740,7 @@ public class DistributedPlanner {
                 setOperationFragment.setFragmentInPlanTree(setOperationNode.getChild(i));
                 setOperationFragment.addChildren(childFragments.get(i).getChildren());
             }
-            setOperationNode.init(ctx_.getRootAnalyzer());
+            setOperationNode.init(ctx.getRootAnalyzer());
             // All child fragments have been absorbed into unionFragment.
             fragments.removeAll(childFragments);
             return setOperationFragment;
@@ -743,9 +748,9 @@ public class DistributedPlanner {
 
         // There is at least one partitioned child fragment.
         // TODO(ML): here
-        PlanFragment setOperationFragment = new PlanFragment(ctx_.getNextFragmentId(), setOperationNode,
+        PlanFragment setOperationFragment = new PlanFragment(ctx.getNextFragmentId(), setOperationNode,
                 new DataPartition(TPartitionType.HASH_PARTITIONED,
-                        setOperationNode.getMaterializedResultExprLists_().get(0)));
+                        setOperationNode.getMaterializedResultExprLists().get(0)));
         for (int i = 0; i < childFragments.size(); ++i) {
             PlanFragment childFragment = childFragments.get(i);
             /* if (childFragment.isPartitioned() && childFragment.getPlanRoot().getNumInstances() > 1) {
@@ -771,7 +776,7 @@ public class DistributedPlanner {
             // Connect the unpartitioned child fragments to SetOperationNode via a random exchange.
             connectChildFragment(setOperationNode, i, setOperationFragment, childFragment);
             childFragment.setOutputPartition(
-                    DataPartition.hashPartitioned(setOperationNode.getMaterializedResultExprLists_().get(i)));
+                    DataPartition.hashPartitioned(setOperationNode.getMaterializedResultExprLists().get(i)));
         }
         return setOperationFragment;
     }
@@ -796,9 +801,9 @@ public class DistributedPlanner {
             PlanNode node, int childIdx,
             PlanFragment parentFragment, PlanFragment childFragment)
             throws UserException {
-        ExchangeNode exchangeNode = new ExchangeNode(ctx_.getNextNodeId(), childFragment.getPlanRoot(), false);
+        ExchangeNode exchangeNode = new ExchangeNode(ctx.getNextNodeId(), childFragment.getPlanRoot(), false);
         exchangeNode.setNumInstances(childFragment.getPlanRoot().getNumInstances());
-        exchangeNode.init(ctx_.getRootAnalyzer());
+        exchangeNode.init(ctx.getRootAnalyzer());
         exchangeNode.setFragment(parentFragment);
         node.setChild(childIdx, exchangeNode);
         childFragment.setDestination(exchangeNode);
@@ -816,10 +821,10 @@ public class DistributedPlanner {
      */
     private PlanFragment createParentFragment(PlanFragment childFragment, DataPartition parentPartition)
             throws UserException {
-        ExchangeNode exchangeNode = new ExchangeNode(ctx_.getNextNodeId(), childFragment.getPlanRoot(), false);
+        ExchangeNode exchangeNode = new ExchangeNode(ctx.getNextNodeId(), childFragment.getPlanRoot(), false);
         exchangeNode.setNumInstances(childFragment.getPlanRoot().getNumInstances());
-        exchangeNode.init(ctx_.getRootAnalyzer());
-        PlanFragment parentFragment = new PlanFragment(ctx_.getNextFragmentId(), exchangeNode, parentPartition);
+        exchangeNode.init(ctx.getRootAnalyzer());
+        PlanFragment parentFragment = new PlanFragment(ctx.getNextFragmentId(), exchangeNode, parentPartition);
         childFragment.setDestination(exchangeNode);
         childFragment.setOutputPartition(parentPartition);
         return parentFragment;
@@ -982,7 +987,9 @@ public class DistributedPlanner {
         DataPartition parentPartition = null;
         if (hasGrouping) {
             List<Expr> partitionExprs = node.getAggInfo().getPartitionExprs();
-            if (partitionExprs == null) partitionExprs = groupingExprs;
+            if (partitionExprs == null) {
+                partitionExprs = groupingExprs;
+            }
             // boolean childHasCompatPartition = ctx_.getRootAnalyzer().equivSets(partitionExprs,
             //         childFragment.getDataPartition().getPartitionExprs());
             // if (childHasCompatPartition && !childFragment.refsNullableTupleId(partitionExprs)) {
@@ -997,7 +1004,7 @@ public class DistributedPlanner {
             // the parent fragment is partitioned on the grouping exprs;
             // substitute grouping exprs to reference the *output* of the agg, not the input
             partitionExprs = Expr.substituteList(partitionExprs,
-                    node.getAggInfo().getIntermediateSmap(), ctx_.getRootAnalyzer(), false);
+                    node.getAggInfo().getIntermediateSmap(), ctx.getRootAnalyzer(), false);
             parentPartition = DataPartition.hashPartitioned(partitionExprs);
         } else {
             // the parent fragment is unpartitioned
@@ -1010,7 +1017,7 @@ public class DistributedPlanner {
         childFragment.addPlanRoot(node);
         node.setIntermediateTuple();
 
-        node.setIsPreagg(ctx_);
+        node.setIsPreagg(ctx);
 
         // if there is a limit, we need to transfer it from the pre-aggregation
         // node in the child fragment to the merge aggregation node in the parent
@@ -1020,9 +1027,9 @@ public class DistributedPlanner {
 
         // place a merge aggregation step in a new fragment
         PlanFragment mergeFragment = createParentFragment(childFragment, parentPartition);
-        AggregationNode mergeAggNode = new AggregationNode(ctx_.getNextNodeId(),
+        AggregationNode mergeAggNode = new AggregationNode(ctx.getNextNodeId(),
                 mergeFragment.getPlanRoot(), node.getAggInfo().getMergeAggInfo());
-        mergeAggNode.init(ctx_.getRootAnalyzer());
+        mergeAggNode.init(ctx.getRootAnalyzer());
         mergeAggNode.setLimit(limit);
         // Merge of non-grouping agg only processes one tuple per Impala daemon - codegen
         // will cost more than benefit.
@@ -1035,9 +1042,9 @@ public class DistributedPlanner {
         // HAVING predicates can only be evaluated after the merge agg step
         node.transferConjuncts(mergeAggNode);
         // Recompute stats after transferring the conjuncts_ (order is important).
-        node.computeStats(ctx_.getRootAnalyzer());
-        mergeFragment.getPlanRoot().computeStats(ctx_.getRootAnalyzer());
-        mergeAggNode.computeStats(ctx_.getRootAnalyzer());
+        node.computeStats(ctx.getRootAnalyzer());
+        mergeFragment.getPlanRoot().computeStats(ctx.getRootAnalyzer());
+        mergeAggNode.computeStats(ctx.getRootAnalyzer());
         // Set new plan root after updating stats.
         mergeFragment.addPlanRoot(mergeAggNode);
 
@@ -1073,7 +1080,7 @@ public class DistributedPlanner {
             // partitioning happens on the intermediate tuple of the 1st phase.
             partitionExprs = Expr.substituteList(
                     groupingExprs, firstPhaseAggInfo.getOutputToIntermediateSmap(),
-                    ctx_.getRootAnalyzer(), false);
+                    ctx.getRootAnalyzer(), false);
         } else {
             // We need to do
             // - child fragment:
@@ -1085,7 +1092,7 @@ public class DistributedPlanner {
             //   * merge agg of phase 2
             if (!isMultiDistinct) {
                 partitionExprs = Expr.substituteList(firstPhaseAggInfo.getGroupingExprs(),
-                        firstPhaseAggInfo.getIntermediateSmap(), ctx_.getRootAnalyzer(), false);
+                        firstPhaseAggInfo.getIntermediateSmap(), ctx.getRootAnalyzer(), false);
             }
         }
 
@@ -1103,14 +1110,14 @@ public class DistributedPlanner {
             // Convert the existing node to a preaggregation.
             AggregationNode preaggNode = (AggregationNode)node.getChild(0);
 
-            preaggNode.setIsPreagg(ctx_);
+            preaggNode.setIsPreagg(ctx);
 
             // place a merge aggregation step for the 1st phase in a new fragment
             mergeFragment = createParentFragment(childFragment, mergePartition);
             AggregateInfo phase1MergeAggInfo = firstPhaseAggInfo.getMergeAggInfo();
             AggregationNode phase1MergeAggNode =
-                    new AggregationNode(ctx_.getNextNodeId(), preaggNode, phase1MergeAggInfo);
-            phase1MergeAggNode.init(ctx_.getRootAnalyzer());
+                    new AggregationNode(ctx.getNextNodeId(), preaggNode, phase1MergeAggInfo);
+            phase1MergeAggNode.init(ctx.getRootAnalyzer());
             phase1MergeAggNode.unsetNeedsFinalize();
             phase1MergeAggNode.setIntermediateTuple();
             mergeFragment.addPlanRoot(phase1MergeAggNode);
@@ -1124,7 +1131,9 @@ public class DistributedPlanner {
         if (!hasGrouping && !isMultiDistinct) {
             // place the merge aggregation of the 2nd phase in an unpartitioned fragment;
             // add preceding merge fragment at end
-            if (mergeFragment != childFragment) fragments.add(mergeFragment);
+            if (mergeFragment != childFragment) {
+                fragments.add(mergeFragment);
+            }
 
             node.unsetNeedsFinalize();
             node.setIntermediateTuple();
@@ -1133,9 +1142,9 @@ public class DistributedPlanner {
             node.unsetLimit();
             mergeFragment = createParentFragment(mergeFragment, DataPartition.UNPARTITIONED);
             AggregateInfo phase2MergeAggInfo = node.getAggInfo().getMergeAggInfo();
-            AggregationNode phase2MergeAggNode = new AggregationNode(ctx_.getNextNodeId(), node,
+            AggregationNode phase2MergeAggNode = new AggregationNode(ctx.getNextNodeId(), node,
                     phase2MergeAggInfo);
-            phase2MergeAggNode.init(ctx_.getRootAnalyzer());
+            phase2MergeAggNode.init(ctx.getRootAnalyzer());
             // Transfer having predicates. If hasGrouping == true, the predicates should
             // instead be evaluated by the 2nd phase agg (the predicates are already there).
             node.transferConjuncts(phase2MergeAggNode);
@@ -1183,7 +1192,7 @@ public class DistributedPlanner {
         PlanFragment analyticFragment = childFragment;
         if (sortNode.getInputPartition() != null) {
             sortNode.getInputPartition().substitute(
-                    childFragment.getPlanRoot().getOutputSmap(), ctx_.getRootAnalyzer());
+                    childFragment.getPlanRoot().getOutputSmap(), ctx.getRootAnalyzer());
 
             // Make sure the childFragment's output is partitioned as required by the sortNode.
             // Even if the fragment and the sort partition exprs are equal, an exchange is
@@ -1242,8 +1251,8 @@ public class DistributedPlanner {
             childSortNode.setLimit(limit + offset);
         }
         childSortNode.setOffset(0);
-        childSortNode.computeStats(ctx_.getRootAnalyzer());
-        exchNode.computeStats(ctx_.getRootAnalyzer());
+        childSortNode.computeStats(ctx.getRootAnalyzer());
+        exchNode.computeStats(ctx.getRootAnalyzer());
 
         return mergeFragment;
     }
@@ -1262,8 +1271,8 @@ public class DistributedPlanner {
         mergeFragment.addPlanRoot(assertRowCountNode);
 
         // reset the stat of assert row count node
-        exchNode.computeStats(ctx_.getRootAnalyzer());
-        assertRowCountNode.computeStats(ctx_.getRootAnalyzer());
+        exchNode.computeStats(ctx.getRootAnalyzer());
+        assertRowCountNode.computeStats(ctx.getRootAnalyzer());
 
         return mergeFragment;
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/EmptySetNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/EmptySetNode.java
index d00dc12724..bda1f85aa5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/EmptySetNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/EmptySetNode.java
@@ -59,7 +59,9 @@ public class EmptySetNode extends PlanNode {
         // the logical output tuple is returned by getMaterializedTupleIds(). It needs
         // to be set as materialized (even though it isn't) to avoid failing precondition
         // checks generating the thrift for slot refs that may reference this tuple.
-        for (TupleId id: tupleIds) analyzer.getTupleDesc(id).setIsMaterialized(true);
+        for (TupleId id: tupleIds) {
+            analyzer.getTupleDesc(id).setIsMaterialized(true);
+        }
         computeTupleStatAndMemLayout(analyzer);
         computeStats(analyzer);
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/ExchangeNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/ExchangeNode.java
index e9d1e3434a..c1c24960ec 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ExchangeNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ExchangeNode.java
@@ -77,7 +77,9 @@ public class ExchangeNode extends PlanNode {
             this.conjuncts = Lists.newArrayList();
         }
         // Only apply the limit at the receiver if there are multiple senders.
-        if (inputNode.getFragment().isPartitioned()) limit = inputNode.limit;
+        if (inputNode.getFragment().isPartitioned()) {
+            limit = inputNode.limit;
+        }
         computeTupleIds();
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/HashJoinNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/HashJoinNode.java
index 812b7d459d..ab04131c16 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/HashJoinNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/HashJoinNode.java
@@ -328,17 +328,27 @@ public class HashJoinNode extends PlanNode {
          * table/column of at least one side is missing stats.
          */
         public static EqJoinConjunctScanSlots create(Expr eqJoinConjunct) {
-            if (!Expr.IS_EQ_BINARY_PREDICATE.apply(eqJoinConjunct)) return null;
+            if (!Expr.IS_EQ_BINARY_PREDICATE.apply(eqJoinConjunct)) {
+                return null;
+            }
             SlotDescriptor lhsScanSlot = eqJoinConjunct.getChild(0).findSrcScanSlot();
-            if (lhsScanSlot == null || !hasNumRowsAndNdvStats(lhsScanSlot)) return null;
+            if (lhsScanSlot == null || !hasNumRowsAndNdvStats(lhsScanSlot)) {
+                return null;
+            }
             SlotDescriptor rhsScanSlot = eqJoinConjunct.getChild(1).findSrcScanSlot();
-            if (rhsScanSlot == null || !hasNumRowsAndNdvStats(rhsScanSlot)) return null;
+            if (rhsScanSlot == null || !hasNumRowsAndNdvStats(rhsScanSlot)) {
+                return null;
+            }
             return new EqJoinConjunctScanSlots(eqJoinConjunct, lhsScanSlot, rhsScanSlot);
         }
 
         private static boolean hasNumRowsAndNdvStats(SlotDescriptor slotDesc) {
-            if (slotDesc.getColumn() == null) return false;
-            if (!slotDesc.getStats().hasNumDistinctValues()) return false;
+            if (slotDesc.getColumn() == null) {
+                return false;
+            }
+            if (!slotDesc.getStats().hasNumDistinctValues()) {
+                return false;
+            }
             return true;
         }
 
@@ -381,7 +391,9 @@ public class HashJoinNode extends PlanNode {
         List<EqJoinConjunctScanSlots> eqJoinConjunctSlots = new ArrayList<>();
         for (Expr eqJoinConjunct : eqJoinConjuncts) {
             EqJoinConjunctScanSlots slots = EqJoinConjunctScanSlots.create(eqJoinConjunct);
-            if (slots != null) eqJoinConjunctSlots.add(slots);
+            if (slots != null) {
+                eqJoinConjunctSlots.add(slots);
+            }
         }
 
         if (eqJoinConjunctSlots.isEmpty()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/ListPartitionPrunerV2.java b/fe/fe-core/src/main/java/org/apache/doris/planner/ListPartitionPrunerV2.java
index 154f9cfbc2..fffff0fb66 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ListPartitionPrunerV2.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ListPartitionPrunerV2.java
@@ -181,8 +181,12 @@ public class ListPartitionPrunerV2 extends PartitionPrunerV2Base {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
             ListPartitionUniqueId that = (ListPartitionUniqueId) o;
             return partitionId == that.partitionId && partitionKeyIndex == that.partitionKeyIndex;
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanFragment.java b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanFragment.java
index 4703f0827c..d49165e1ea 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanFragment.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanFragment.java
@@ -164,10 +164,16 @@ public class PlanFragment extends TreeNode<PlanFragment> {
      * different fragment.
      */
     public void setFragmentInPlanTree(PlanNode node) {
-        if (node == null) return;
+        if (node == null) {
+            return;
+        }
         node.setFragment(this);
-        if (node instanceof ExchangeNode) return;
-        for (PlanNode child : node.getChildren()) setFragmentInPlanTree(child);
+        if (node instanceof ExchangeNode) {
+            return;
+        }
+        for (PlanNode child : node.getChildren()) {
+            setFragmentInPlanTree(child);
+        }
     }
 
     /**
@@ -320,7 +326,9 @@ public class PlanFragment extends TreeNode<PlanFragment> {
     public PlanFragmentId getId() { return fragmentId; }
 
     public PlanFragment getDestFragment() {
-        if (destNode == null) return null;
+        if (destNode == null) {
+            return null;
+        }
         return destNode.getFragment();
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanFragmentId.java b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanFragmentId.java
index 7c0ff3ab35..72872b99bb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanFragmentId.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanFragmentId.java
@@ -31,9 +31,9 @@ public class PlanFragmentId extends Id<PlanFragmentId> {
     public static IdGenerator<PlanFragmentId> createGenerator() {
         return new IdGenerator<PlanFragmentId>() {
             @Override
-            public PlanFragmentId getNextId() { return new PlanFragmentId(nextId_++); }
+            public PlanFragmentId getNextId() { return new PlanFragmentId(nextId++); }
             @Override
-            public PlanFragmentId getMaxId() { return new PlanFragmentId(nextId_ - 1); }
+            public PlanFragmentId getMaxId() { return new PlanFragmentId(nextId - 1); }
         };
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNode.java
index d254e096c6..d43747ed11 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNode.java
@@ -111,7 +111,7 @@ abstract public class PlanNode extends TreeNode<PlanNode> {
 
     // Fragment that this PlanNode is executed in. Valid only after this PlanNode has been
     // assigned to a fragment. Set and maintained by enclosing PlanFragment.
-    protected PlanFragment fragment_;
+    protected PlanFragment fragment;
 
     // estimate of the output cardinality of this node; set in computeStats();
     // invalid: -1
@@ -242,7 +242,7 @@ abstract public class PlanNode extends TreeNode<PlanNode> {
     }
 
     public PlanFragmentId getFragmentId() {
-        return fragment_.getFragmentId();
+        return fragment.getFragmentId();
     }
 
     public void setFragmentId(PlanFragmentId id) {
@@ -250,11 +250,11 @@ abstract public class PlanNode extends TreeNode<PlanNode> {
     }
 
     public void setFragment(PlanFragment fragment) {
-        fragment_ = fragment;
+        this.fragment = fragment;
     }
 
     public PlanFragment getFragment() {
-        return fragment_;
+        return fragment;
     }
 
     public long getLimit() {
@@ -781,7 +781,9 @@ abstract public class PlanNode extends TreeNode<PlanNode> {
         // Collect all estimated selectivities.
         List<Double> selectivities = new ArrayList<>();
         for (Expr e : conjuncts) {
-            if (e.hasSelectivity()) selectivities.add(e.getSelectivity());
+            if (e.hasSelectivity()) {
+                selectivities.add(e.getSelectivity());
+            }
         }
         if (selectivities.size() != conjuncts.size()) {
             // Some conjuncts have no estimated selectivity. Use a single default
@@ -874,7 +876,9 @@ abstract public class PlanNode extends TreeNode<PlanNode> {
     public void clearRuntimeFilters() { runtimeFilters.clear(); }
 
     protected String getRuntimeFilterExplainString(boolean isBuildNode) {
-        if (runtimeFilters.isEmpty()) return "";
+        if (runtimeFilters.isEmpty()) {
+            return "";
+        }
         List<String> filtersStr = new ArrayList<>();
         for (RuntimeFilter filter: runtimeFilters) {
             StringBuilder filterStr = new StringBuilder();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNodeId.java b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNodeId.java
index 5b78dae858..26e5a0799f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNodeId.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/PlanNodeId.java
@@ -31,9 +31,9 @@ public class PlanNodeId extends Id<PlanNodeId> {
     public static IdGenerator<PlanNodeId> createGenerator() {
         return new IdGenerator<PlanNodeId>() {
             @Override
-            public PlanNodeId getNextId() { return new PlanNodeId(nextId_++); }
+            public PlanNodeId getNextId() { return new PlanNodeId(nextId++); }
             @Override
-            public PlanNodeId getMaxId() { return new PlanNodeId(nextId_ - 1); }
+            public PlanNodeId getMaxId() { return new PlanNodeId(nextId - 1); }
         };
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/PlannerContext.java b/fe/fe-core/src/main/java/org/apache/doris/planner/PlannerContext.java
index f1cf9d3405..3d9be70c42 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/PlannerContext.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/PlannerContext.java
@@ -41,30 +41,30 @@ public class PlannerContext {
     // used for determining whether a broadcast join is feasible.
     public final static double HASH_TBL_SPACE_OVERHEAD = 1.1;
 
-    private final IdGenerator<PlanNodeId> nodeIdGenerator_ = PlanNodeId.createGenerator();
-    private final IdGenerator<PlanFragmentId> fragmentIdGenerator_ =
+    private final IdGenerator<PlanNodeId> nodeIdGenerator = PlanNodeId.createGenerator();
+    private final IdGenerator<PlanFragmentId> fragmentIdGenerator =
             PlanFragmentId.createGenerator();
 
     // TODO(zc) private final TQueryCtx queryCtx_;
     // TODO(zc) private final AnalysisContext.AnalysisResult analysisResult_;
-    private final Analyzer analyzer_;
-    private final TQueryOptions queryOptions_;
-    private final QueryStmt queryStmt_;
-    private final StatementBase statement_;
+    private final Analyzer analyzer;
+    private final TQueryOptions queryOptions;
+    private final QueryStmt queryStmt;
+    private final StatementBase statement;
 
     public PlannerContext(Analyzer analyzer, QueryStmt queryStmt, TQueryOptions queryOptions, StatementBase statement) {
-        this.analyzer_ = analyzer;
-        this.queryStmt_ = queryStmt;
-        this.queryOptions_ = queryOptions;
-        this.statement_ = statement;
+        this.analyzer = analyzer;
+        this.queryStmt = queryStmt;
+        this.queryOptions = queryOptions;
+        this.statement = statement;
     }
 
-    public QueryStmt getQueryStmt() { return queryStmt_; }
-    public TQueryOptions getQueryOptions() { return queryOptions_; } // getRootAnalyzer().getQueryOptions(); }
-    public Analyzer getRootAnalyzer() { return analyzer_; } // analysisResult_.getAnalyzer(); }
+    public QueryStmt getQueryStmt() { return queryStmt; }
+    public TQueryOptions getQueryOptions() { return queryOptions; } // getRootAnalyzer().getQueryOptions(); }
+    public Analyzer getRootAnalyzer() { return analyzer; } // analysisResult_.getAnalyzer(); }
     public boolean isSingleNodeExec() { return getQueryOptions().num_nodes == 1; }
-    public PlanNodeId getNextNodeId() { return nodeIdGenerator_.getNextId(); }
-    public PlanFragmentId getNextFragmentId() { return fragmentIdGenerator_.getNextId(); }
+    public PlanNodeId getNextNodeId() { return nodeIdGenerator.getNextId(); }
+    public PlanFragmentId getNextFragmentId() { return fragmentIdGenerator.getNextId(); }
 
-    public boolean isInsert() { return statement_ instanceof InsertStmt; }
+    public boolean isInsert() { return statement instanceof InsertStmt; }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/RangePartitionPrunerV2.java b/fe/fe-core/src/main/java/org/apache/doris/planner/RangePartitionPrunerV2.java
index e122877128..d264247331 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/RangePartitionPrunerV2.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/RangePartitionPrunerV2.java
@@ -256,8 +256,12 @@ public class RangePartitionPrunerV2 extends PartitionPrunerV2Base {
 
         @Override
         public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
             RangePartitionUniqueId that = (RangePartitionUniqueId) o;
             return partitionId == that.partitionId;
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/ResultFileSink.java b/fe/fe-core/src/main/java/org/apache/doris/planner/ResultFileSink.java
index bfca278792..0ba37afbc6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ResultFileSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ResultFileSink.java
@@ -41,7 +41,7 @@ public class ResultFileSink extends DataSink {
     private DataPartition outputPartition;
     private TupleId outputTupleId;
     private String header = "";
-    private String header_type = "";
+    private String headerType = "";
 
     public ResultFileSink(PlanNodeId exchNodeId, OutFileClause outFileClause) {
         this.exchNodeId = exchNodeId;
@@ -69,7 +69,7 @@ public class ResultFileSink extends DataSink {
                 outFileClause.getHeaderType().equals(FeConstants.csv_with_names_and_types)) {
             header = genNames(labels, outFileClause.getColumnSeparator(), outFileClause.getLineDelimiter());
         }
-        header_type = outFileClause.getHeaderType();
+        headerType = outFileClause.getHeaderType();
     }
 
     public String getBrokerName() {
@@ -118,7 +118,7 @@ public class ResultFileSink extends DataSink {
         tResultFileSink.setFileOptions(fileSinkOptions);
         tResultFileSink.setStorageBackendType(storageType.toThrift());
         tResultFileSink.setDestNodeId(exchNodeId.asInt());
-        tResultFileSink.setHeaderType(header_type);
+        tResultFileSink.setHeaderType(headerType);
         tResultFileSink.setHeader(header);
         if (outputTupleId != null) {
             tResultFileSink.setOutputTupleId(outputTupleId.asInt());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilter.java b/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilter.java
index 45c90d3025..a23570b815 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilter.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilter.java
@@ -142,7 +142,9 @@ public final class RuntimeFilter {
 
     @Override
     public boolean equals(Object obj) {
-        if (!(obj instanceof RuntimeFilter)) return false;
+        if (!(obj instanceof RuntimeFilter)) {
+            return false;
+        }
         return ((RuntimeFilter) obj).id.equals(id);
     }
 
@@ -198,13 +200,17 @@ public final class RuntimeFilter {
         // will filter the null value in child[0] while it is needed in the Null-safe equal join.
         // For example: select * from a join b where a.id<=>b.id
         // the null value in table a should be return by scan node instead of filtering it by runtimeFilter.
-        if (!Predicate.isUnNullSafeEquivalencePredicate(joinPredicate)) return null;
+        if (!Predicate.isUnNullSafeEquivalencePredicate(joinPredicate)) {
+            return null;
+        }
 
         BinaryPredicate normalizedJoinConjunct =
                 SingleNodePlanner.getNormalizedEqPred(joinPredicate,
                         filterSrcNode.getChild(0).getTupleIds(),
                         filterSrcNode.getChild(1).getTupleIds(), analyzer);
-        if (normalizedJoinConjunct == null) return null;
+        if (normalizedJoinConjunct == null) {
+            return null;
+        }
 
         // Ensure that the target expr does not contain TupleIsNull predicates as these
         // can't be evaluated at a scan node.
@@ -213,11 +219,15 @@ public final class RuntimeFilter {
         Expr srcExpr = normalizedJoinConjunct.getChild(1);
 
         if (srcExpr.getType().equals(ScalarType.createHllType())
-                || srcExpr.getType().equals(ScalarType.createType(PrimitiveType.BITMAP))) return null;
+                || srcExpr.getType().equals(ScalarType.createType(PrimitiveType.BITMAP))) {
+            return null;
+        }
 
         Map<TupleId, List<SlotId>> targetSlots = getTargetSlots(analyzer, targetExpr);
         Preconditions.checkNotNull(targetSlots);
-        if (targetSlots.isEmpty()) return null;
+        if (targetSlots.isEmpty()) {
+            return null;
+        }
 
         if (LOG.isTraceEnabled()) {
             LOG.trace("Generating runtime filter from predicate " + joinPredicate);
@@ -295,8 +305,9 @@ public final class RuntimeFilter {
             // TODO(zxy) Returns true if 'p' evaluates to true when all its referenced slots are NULL, returns false
             //  otherwise. Throws if backend expression evaluation fails.
             if (expr.isContainsFunction("COALESCE") || expr.isContainsFunction("IFNULL")
-                    || expr.isContainsClass("org.apache.doris.analysis.CaseExpr"))
+                    || expr.isContainsClass("org.apache.doris.analysis.CaseExpr")) {
                 return Collections.emptyMap();
+            }
         }
 
         Map<TupleId, List<SlotId>> slotsByTid = new HashMap<>();
@@ -304,7 +315,9 @@ public final class RuntimeFilter {
         // equivalent slots that are bound by the same base table tuple(s).
         for (SlotId slotId: sids) {
             Map<TupleId, List<SlotId>> currSlotsByTid = getBaseTblEquivSlots(analyzer, slotId);
-            if (currSlotsByTid.isEmpty()) return Collections.emptyMap();
+            if (currSlotsByTid.isEmpty()) {
+                return Collections.emptyMap();
+            }
             if (slotsByTid.isEmpty()) {
                 slotsByTid.putAll(currSlotsByTid);
                 continue;
@@ -329,7 +342,9 @@ public final class RuntimeFilter {
                     entry.getValue().addAll(slotIds);
                 }
             }
-            if (slotsByTid.isEmpty()) return Collections.emptyMap();
+            if (slotsByTid.isEmpty()) {
+                return Collections.emptyMap();
+            }
         }
         return slotsByTid;
     }
@@ -344,7 +359,9 @@ public final class RuntimeFilter {
         Map<TupleId, List<SlotId>> slotsByTid = new HashMap<>();
         for (SlotId targetSid: analyzer.getValueTransferTargets(srcSid)) {
             TupleDescriptor tupleDesc = analyzer.getSlotDesc(targetSid).getParent();
-            if (tupleDesc.getTable() == null) continue;
+            if (tupleDesc.getTable() == null) {
+                continue;
+            }
             List<SlotId> sids = slotsByTid.computeIfAbsent(tupleDesc.getId(), k -> new ArrayList<>());
             sids.add(targetSid);
         }
@@ -353,7 +370,9 @@ public final class RuntimeFilter {
 
     public Expr getTargetExpr(PlanNodeId targetPlanNodeId) {
         for (RuntimeFilterTarget target: targets) {
-            if (target.node.getId() != targetPlanNodeId) continue;
+            if (target.node.getId() != targetPlanNodeId) {
+                continue;
+            }
             return target.expr;
         }
         return null;
@@ -403,7 +422,7 @@ public final class RuntimeFilter {
             return;
         }
         double fpp = FeConstants.default_bloom_filter_fpp;
-        int logFilterSize = GetMinLogSpaceForBloomFilter(ndvEstimate, fpp);
+        int logFilterSize = getMinLogSpaceForBloomFilter(ndvEstimate, fpp);
         filterSizeBytes = 1L << logFilterSize;
         filterSizeBytes = Math.max(filterSizeBytes, filterSizeLimits.minVal);
         filterSizeBytes = Math.min(filterSizeBytes, filterSizeLimits.maxVal);
@@ -414,8 +433,10 @@ public final class RuntimeFilter {
      * filter with 'ndv' unique elements and a false positive probability of less
      * than 'fpp'.
      */
-    public static int GetMinLogSpaceForBloomFilter(long ndv, double fpp) {
-        if (0 == ndv) return 0;
+    public static int getMinLogSpaceForBloomFilter(long ndv, double fpp) {
+        if (0 == ndv) {
+            return 0;
+        }
         double k = 8; // BUCKET_WORDS
         // m is the number of bits we would need to get the fpp specified
         double m = -k * ndv / Math.log(1 - Math.pow(fpp, 1.0 / k));
@@ -430,17 +451,19 @@ public final class RuntimeFilter {
     public void assignToPlanNodes() {
         Preconditions.checkState(hasTargets());
         builderNode.addRuntimeFilter(this);
-        builderNode.fragment_.setBuilderRuntimeFilterIds(getFilterId());
+        builderNode.fragment.setBuilderRuntimeFilterIds(getFilterId());
         for (RuntimeFilterTarget target: targets) {
             target.node.addRuntimeFilter(this);
             // fragment is expected to use this filter id
-            target.node.fragment_.setTargetRuntimeFilterIds(this.id);
+            target.node.fragment.setTargetRuntimeFilterIds(this.id);
         }
     }
 
     public void registerToPlan(Analyzer analyzer) {
         setIsBroadcast(getBuilderNode().getDistributionMode() == HashJoinNode.DistributionMode.BROADCAST);
-        if (LOG.isTraceEnabled()) LOG.trace("Runtime filter: " + debugString());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Runtime filter: " + debugString());
+        }
         assignToPlanNodes();
         analyzer.putAssignedRuntimeFilter(this);
     }
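
For reference, the renamed getMinLogSpaceForBloomFilter() above turns an NDV estimate and a target false-positive probability into the log2 of a Bloom filter's byte size, using the formula shown in the hunk. A minimal standalone sketch of that sizing math follows; the ndv/fpp values and the final round-up to the next power of two are assumptions for illustration, since that tail of the method is outside the hunk and the defaults live in FeConstants:

    // Standalone sketch of the sizing math above; not Doris code.
    public class BloomFilterSizingSketch {
        public static void main(String[] args) {
            long ndv = 1_000_000L;  // hypothetical NDV estimate
            double fpp = 0.05;      // hypothetical target false-positive probability
            double k = 8;           // BUCKET_WORDS, as in the hunk
            // m is the number of bits needed to reach fpp for ndv distinct values
            double m = -k * ndv / Math.log(1 - Math.pow(fpp, 1.0 / k));
            // assumed round-up of the byte size to the next power of two
            int logFilterSize = (int) Math.ceil(Math.log(m / 8) / Math.log(2));
            System.out.println("bits needed = " + (long) m
                    + ", filter bytes = " + (1L << logFilterSize));
        }
    }
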
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilterGenerator.java b/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilterGenerator.java
index b6872c9ecf..b68df92ba4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilterGenerator.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilterGenerator.java
@@ -153,7 +153,9 @@ public final class RuntimeFilterGenerator {
         for (RuntimeFilter filter: filters) {
             filter.extractTargetsPosition();
             if (filter.getType() == TRuntimeFilterType.BLOOM) {
-                if (numBloomFilters >= maxNumBloomFilters) continue;
+                if (numBloomFilters >= maxNumBloomFilters) {
+                    continue;
+                }
                 ++numBloomFilters;
             }
             filter.registerToPlan(analyzer);
@@ -209,12 +211,16 @@ public final class RuntimeFilterGenerator {
             // Actually all types of Runtime Filter objects generated by the same joinConjunct have the same
             // properties except ID. Maybe consider avoiding repeated generation
             for (TRuntimeFilterType type : TRuntimeFilterType.values()) {
-                if ((sessionVariable.getRuntimeFilterType() & type.getValue()) == 0) continue;
+                if ((sessionVariable.getRuntimeFilterType() & type.getValue()) == 0) {
+                    continue;
+                }
                 for (int i = 0; i < joinConjuncts.size(); i++) {
                     Expr conjunct = joinConjuncts.get(i);
                     RuntimeFilter filter = RuntimeFilter.create(filterIdGenerator,
                             analyzer, conjunct, i, joinNode, type, bloomFilterSizeLimits);
-                    if (filter == null) continue;
+                    if (filter == null) {
+                        continue;
+                    }
                     registerRuntimeFilter(filter);
                     filters.add(filter);
                 }
@@ -223,7 +229,9 @@ public final class RuntimeFilterGenerator {
             // Finalize every runtime filter of that join. This is to ensure that we don't
             // assign a filter to a scan node from the right subtree of joinNode or ancestor
             // join nodes in case we don't find a destination node in the left subtree.
-            for (RuntimeFilter runtimeFilter: filters) finalizeRuntimeFilter(runtimeFilter);
+            for (RuntimeFilter runtimeFilter: filters) {
+                finalizeRuntimeFilter(runtimeFilter);
+            }
             generateFilters(root.getChild(1));
         } else if (root instanceof ScanNode) {
             assignRuntimeFilters((ScanNode) root);
@@ -284,20 +292,32 @@ public final class RuntimeFilterGenerator {
      * 2. Only olap scan nodes are supported:
      */
     private void assignRuntimeFilters(ScanNode scanNode) {
-        if (!(scanNode instanceof OlapScanNode)) return;
+        if (!(scanNode instanceof OlapScanNode)) {
+            return;
+        }
         TupleId tid = scanNode.getTupleIds().get(0);
-        if (!runtimeFiltersByTid.containsKey(tid)) return;
+        if (!runtimeFiltersByTid.containsKey(tid)) {
+            return;
+        }
         String runtimeFilterMode = sessionVariable.getRuntimeFilterMode();
         Preconditions.checkState(Arrays.stream(TRuntimeFilterMode.values()).map(Enum::name).anyMatch(
                 p -> p.equals(runtimeFilterMode.toUpperCase())), "runtimeFilterMode not expected");
         for (RuntimeFilter filter: runtimeFiltersByTid.get(tid)) {
-            if (filter.isFinalized()) continue;
+            if (filter.isFinalized()) {
+                continue;
+            }
             Expr targetExpr = computeTargetExpr(filter, tid);
-            if (targetExpr == null) continue;
+            if (targetExpr == null) {
+                continue;
+            }
             boolean isBoundByKeyColumns = isBoundByKeyColumns(analyzer, targetExpr, scanNode);
             boolean isLocalTarget = isLocalTarget(filter, scanNode);
-            if (runtimeFilterMode.equals(TRuntimeFilterMode.LOCAL.name()) && !isLocalTarget) continue;
-            if (runtimeFilterMode.equals(TRuntimeFilterMode.REMOTE.name()) && isLocalTarget) continue;
+            if (runtimeFilterMode.equals(TRuntimeFilterMode.LOCAL.name()) && !isLocalTarget) {
+                continue;
+            }
+            if (runtimeFilterMode.equals(TRuntimeFilterMode.REMOTE.name()) && isLocalTarget) {
+                continue;
+            }
 
             RuntimeFilter.RuntimeFilterTarget target = new RuntimeFilter.RuntimeFilterTarget(
                     scanNode, targetExpr, isBoundByKeyColumns, isLocalTarget);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilterId.java b/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilterId.java
index c4339b4ecb..f4c8e4ff1a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilterId.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/RuntimeFilterId.java
@@ -30,12 +30,12 @@ public class RuntimeFilterId extends Id<RuntimeFilterId> {
         return new IdGenerator<RuntimeFilterId>() {
             @Override
             public RuntimeFilterId getNextId() {
-                return new RuntimeFilterId(nextId_++);
+                return new RuntimeFilterId(nextId++);
             }
 
             @Override
             public RuntimeFilterId getMaxId() {
-                return new RuntimeFilterId(nextId_ - 1);
+                return new RuntimeFilterId(nextId - 1);
             }
         };
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
index 41f3ad0b77..c308896c3a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
@@ -191,6 +191,7 @@ abstract public class ScanNode extends PlanNode {
                         break;
                     case CONVERT_SUCCESS:
                         result.intersect(ranges.ranges);
+                        break;
                     case CONVERT_FAILURE:
                     default:
                         break;
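
The break added above makes the CONVERT_SUCCESS case leave the switch explicitly instead of falling through the CONVERT_FAILURE/default labels; those labels only break, so the change states the intent rather than altering behaviour. A tiny self-contained reminder of the fall-through rule it guards against (hypothetical labels, not the Doris enum):

    // Without a break, a matched case keeps executing the cases below it.
    public class FallThroughDemo {
        public static void main(String[] args) {
            StringBuilder trace = new StringBuilder();
            switch (0) {
                case 0:
                    trace.append("success ");
                    // no break: execution continues into the next label
                case 1:
                default:
                    trace.append("failure-or-default");
                    break;
            }
            System.out.println(trace); // prints "success failure-or-default"
        }
    }
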
diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/SetOperationNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/SetOperationNode.java
index a378635b2c..b500fed25f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/SetOperationNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/SetOperationNode.java
@@ -58,54 +58,54 @@ public abstract class SetOperationNode extends PlanNode {
 
     // List of set operation result exprs of the originating SetOperationStmt. Used for
     // determining passthrough-compatibility of children.
-    protected List<Expr> setOpResultExprs_;
+    protected List<Expr> setOpResultExprs;
 
     // Expr lists corresponding to the input query stmts.
     // The ith resultExprList belongs to the ith child.
     // All exprs are resolved to base tables.
-    protected List<List<Expr>> resultExprLists_ = Lists.newArrayList();
+    protected List<List<Expr>> resultExprLists = Lists.newArrayList();
 
     // Expr lists that originate from constant select stmts.
     // We keep them separate from the regular expr lists to avoid null children.
-    protected List<List<Expr>> constExprLists_ = Lists.newArrayList();
+    protected List<List<Expr>> constExprLists = Lists.newArrayList();
 
     // Materialized result/const exprs corresponding to materialized slots.
     // Set in finalize() and substituted against the corresponding child's output smap.
-    protected List<List<Expr>> materializedResultExprLists_ = Lists.newArrayList();
-    protected List<List<Expr>> materializedConstExprLists_ = Lists.newArrayList();
+    protected List<List<Expr>> materializedResultExprLists = Lists.newArrayList();
+    protected List<List<Expr>> materializedConstExprLists = Lists.newArrayList();
 
     // Indicates if this UnionNode is inside a subplan.
-    protected boolean isInSubplan_;
+    protected boolean isInSubplan;
 
     // Index of the first non-passthrough child.
-    protected int firstMaterializedChildIdx_;
+    protected int firstMaterializedChildIdx;
 
-    protected final TupleId tupleId_;
+    protected final TupleId tupleId;
 
     protected SetOperationNode(PlanNodeId id, TupleId tupleId, String planNodeName) {
         super(id, tupleId.asList(), planNodeName);
-        setOpResultExprs_ = Lists.newArrayList();
-        tupleId_ = tupleId;
-        isInSubplan_ = false;
+        setOpResultExprs = Lists.newArrayList();
+        this.tupleId = tupleId;
+        isInSubplan = false;
     }
 
     protected SetOperationNode(PlanNodeId id, TupleId tupleId, String planNodeName,
                                List<Expr> setOpResultExprs, boolean isInSubplan) {
         super(id, tupleId.asList(), planNodeName);
-        setOpResultExprs_ = setOpResultExprs;
-        tupleId_ = tupleId;
-        isInSubplan_ = isInSubplan;
+        this.setOpResultExprs = setOpResultExprs;
+        this.tupleId = tupleId;
+        this.isInSubplan = isInSubplan;
     }
 
     public void addConstExprList(List<Expr> exprs) {
-        constExprLists_.add(exprs);
+        constExprLists.add(exprs);
     }
 
     /**
      * Returns true if this UnionNode has only constant exprs.
      */
     public boolean isConstantUnion() {
-        return resultExprLists_.isEmpty();
+        return resultExprLists.isEmpty();
     }
 
     /**
@@ -113,15 +113,15 @@ public abstract class SetOperationNode extends PlanNode {
      */
     public void addChild(PlanNode node, List<Expr> resultExprs) {
         super.addChild(node);
-        resultExprLists_.add(resultExprs);
+        resultExprLists.add(resultExprs);
     }
 
-    public List<List<Expr>> getMaterializedResultExprLists_() {
-        return materializedResultExprLists_;
+    public List<List<Expr>> getMaterializedResultExprLists() {
+        return materializedResultExprLists;
     }
 
-    public List<List<Expr>> getMaterializedConstExprLists_() {
-        return materializedConstExprLists_;
+    public List<List<Expr>> getMaterializedConstExprLists() {
+        return materializedConstExprLists;
     }
 
     @Override
@@ -149,11 +149,11 @@ public abstract class SetOperationNode extends PlanNode {
         }
         // drop resultExprs/constExprs that aren't getting materialized (= where the
         // corresponding output slot isn't being materialized)
-        materializedResultExprLists_.clear();
-        Preconditions.checkState(resultExprLists_.size() == children.size());
-        List<SlotDescriptor> slots = analyzer.getDescTbl().getTupleDesc(tupleId_).getSlots();
-        for (int i = 0; i < resultExprLists_.size(); ++i) {
-            List<Expr> exprList = resultExprLists_.get(i);
+        materializedResultExprLists.clear();
+        Preconditions.checkState(resultExprLists.size() == children.size());
+        List<SlotDescriptor> slots = analyzer.getDescTbl().getTupleDesc(tupleId).getSlots();
+        for (int i = 0; i < resultExprLists.size(); ++i) {
+            List<Expr> exprList = resultExprLists.get(i);
             List<Expr> newExprList = Lists.newArrayList();
             Preconditions.checkState(exprList.size() == slots.size());
             for (int j = 0; j < exprList.size(); ++j) {
@@ -161,14 +161,14 @@ public abstract class SetOperationNode extends PlanNode {
                     newExprList.add(exprList.get(j));
                 }
             }
-            materializedResultExprLists_.add(
+            materializedResultExprLists.add(
                     Expr.substituteList(newExprList, getChild(i).getOutputSmap(), analyzer, true));
         }
         Preconditions.checkState(
-                materializedResultExprLists_.size() == getChildren().size());
+                materializedResultExprLists.size() == getChildren().size());
 
-        materializedConstExprLists_.clear();
-        for (List<Expr> exprList : constExprLists_) {
+        materializedConstExprLists.clear();
+        for (List<Expr> exprList : constExprLists) {
             Preconditions.checkState(exprList.size() == slots.size());
             List<Expr> newExprList = Lists.newArrayList();
             for (int i = 0; i < exprList.size(); ++i) {
@@ -176,7 +176,7 @@ public abstract class SetOperationNode extends PlanNode {
                     newExprList.add(exprList.get(i));
                 }
             }
-            materializedConstExprLists_.add(newExprList);
+            materializedConstExprLists.add(newExprList);
         }
     }
 
@@ -195,7 +195,7 @@ public abstract class SetOperationNode extends PlanNode {
     }
 
     private void computeCardinality() {
-        cardinality = constExprLists_.size();
+        cardinality = constExprLists.size();
         for (PlanNode child : children) {
             // ignore missing child cardinality info in the hope it won't matter enough
             // to change the planning outcome
@@ -229,7 +229,7 @@ public abstract class SetOperationNode extends PlanNode {
         // If the Union node is inside a subplan, passthrough should be disabled to avoid
         // performance issues by forcing tiny batches.
         // TODO: Remove this as part of IMPALA-4179.
-        if (isInSubplan_) {
+        if (isInSubplan) {
             return false;
         }
         // Pass through is only done for the simple case where the row has a single tuple. One
@@ -238,35 +238,36 @@ public abstract class SetOperationNode extends PlanNode {
         if (childTupleIds.size() != 1) {
             return false;
         }
-        Preconditions.checkState(!setOpResultExprs_.isEmpty());
+        Preconditions.checkState(!setOpResultExprs.isEmpty());
 
-        TupleDescriptor setOpTupleDescriptor = analyzer.getDescTbl().getTupleDesc(tupleId_);
+        TupleDescriptor setOpTupleDescriptor = analyzer.getDescTbl().getTupleDesc(tupleId);
         TupleDescriptor childTupleDescriptor =
                 analyzer.getDescTbl().getTupleDesc(childTupleIds.get(0));
 
         // Verify that the set operation tuple descriptor has one slot for every expression.
-        Preconditions.checkState(setOpTupleDescriptor.getSlots().size() == setOpResultExprs_.size());
+        Preconditions.checkState(setOpTupleDescriptor.getSlots().size() == setOpResultExprs.size());
         // Verify that the set operation node has one slot for every child expression.
         Preconditions.checkState(
                 setOpTupleDescriptor.getSlots().size() == childExprList.size());
 
-        if (setOpResultExprs_.size() != childTupleDescriptor.getSlots().size()) {
+        if (setOpResultExprs.size() != childTupleDescriptor.getSlots().size()) {
             return false;
         }
         if (setOpTupleDescriptor.getByteSize() != childTupleDescriptor.getByteSize()) {
             return false;
         }
 
-        for (int i = 0; i < setOpResultExprs_.size(); ++i) {
-            if (!setOpTupleDescriptor.getSlots().get(i).isMaterialized())
+        for (int i = 0; i < setOpResultExprs.size(); ++i) {
+            if (!setOpTupleDescriptor.getSlots().get(i).isMaterialized()) {
                 continue;
-            SlotRef setOpSlotRef = setOpResultExprs_.get(i).unwrapSlotRef(false);
+            }
+            SlotRef setOpSlotRef = setOpResultExprs.get(i).unwrapSlotRef(false);
             SlotRef childSlotRef = childExprList.get(i).unwrapSlotRef(false);
             Preconditions.checkNotNull(setOpSlotRef);
             if (childSlotRef == null) {
                 return false;
             }
-            if (!childSlotRef.getDesc().LayoutEquals(setOpSlotRef.getDesc())) {
+            if (!childSlotRef.getDesc().layoutEquals(setOpSlotRef.getDesc())) {
                 return false;
             }
         }
@@ -283,22 +284,22 @@ public abstract class SetOperationNode extends PlanNode {
         List<List<Expr>> newResultExprLists = Lists.newArrayList();
         ArrayList<PlanNode> newChildren = Lists.newArrayList();
         for (int i = 0; i < children.size(); i++) {
-            if (isChildPassthrough(analyzer, children.get(i), resultExprLists_.get(i))) {
-                newResultExprLists.add(resultExprLists_.get(i));
+            if (isChildPassthrough(analyzer, children.get(i), resultExprLists.get(i))) {
+                newResultExprLists.add(resultExprLists.get(i));
                 newChildren.add(children.get(i));
             }
         }
-        firstMaterializedChildIdx_ = newChildren.size();
+        firstMaterializedChildIdx = newChildren.size();
 
         for (int i = 0; i < children.size(); i++) {
-            if (!isChildPassthrough(analyzer, children.get(i), resultExprLists_.get(i))) {
-                newResultExprLists.add(resultExprLists_.get(i));
+            if (!isChildPassthrough(analyzer, children.get(i), resultExprLists.get(i))) {
+                newResultExprLists.add(resultExprLists.get(i));
                 newChildren.add(children.get(i));
             }
         }
 
-        Preconditions.checkState(resultExprLists_.size() == newResultExprLists.size());
-        resultExprLists_ = newResultExprLists;
+        Preconditions.checkState(resultExprLists.size() == newResultExprLists.size());
+        resultExprLists = newResultExprLists;
         Preconditions.checkState(children.size() == newChildren.size());
         children = newChildren;
     }
@@ -320,30 +321,30 @@ public abstract class SetOperationNode extends PlanNode {
     }
 
     protected void toThrift(TPlanNode msg, TPlanNodeType nodeType) {
-        Preconditions.checkState( materializedResultExprLists_.size() == children.size());
+        Preconditions.checkState( materializedResultExprLists.size() == children.size());
         List<List<TExpr>> texprLists = Lists.newArrayList();
-        for (List<Expr> exprList : materializedResultExprLists_) {
+        for (List<Expr> exprList : materializedResultExprLists) {
             texprLists.add(Expr.treesToThrift(exprList));
         }
         List<List<TExpr>> constTexprLists = Lists.newArrayList();
-        for (List<Expr> constTexprList : materializedConstExprLists_) {
+        for (List<Expr> constTexprList : materializedConstExprLists) {
             constTexprLists.add(Expr.treesToThrift(constTexprList));
         }
-        Preconditions.checkState(firstMaterializedChildIdx_ <= children.size());
+        Preconditions.checkState(firstMaterializedChildIdx <= children.size());
         switch (nodeType) {
             case UNION_NODE:
                 msg.union_node = new TUnionNode(
-                        tupleId_.asInt(), texprLists, constTexprLists, firstMaterializedChildIdx_);
+                        tupleId.asInt(), texprLists, constTexprLists, firstMaterializedChildIdx);
                 msg.node_type = TPlanNodeType.UNION_NODE;
                 break;
             case INTERSECT_NODE:
                 msg.intersect_node = new TIntersectNode(
-                        tupleId_.asInt(), texprLists, constTexprLists, firstMaterializedChildIdx_);
+                        tupleId.asInt(), texprLists, constTexprLists, firstMaterializedChildIdx);
                 msg.node_type = TPlanNodeType.INTERSECT_NODE;
                 break;
             case EXCEPT_NODE:
                 msg.except_node = new TExceptNode(
-                        tupleId_.asInt(), texprLists, constTexprLists, firstMaterializedChildIdx_);
+                        tupleId.asInt(), texprLists, constTexprLists, firstMaterializedChildIdx);
                 msg.node_type = TPlanNodeType.EXCEPT_NODE;
                 break;
             default:
@@ -364,23 +365,23 @@ public abstract class SetOperationNode extends PlanNode {
         if (CollectionUtils.isNotEmpty(conjuncts)) {
             output.append(prefix).append("predicates: ").append(getExplainString(conjuncts)).append("\n");
         }
-        if (CollectionUtils.isNotEmpty(constExprLists_)) {
+        if (CollectionUtils.isNotEmpty(constExprLists)) {
             output.append(prefix).append("constant exprs: ").append("\n");
-            for (List<Expr> exprs : constExprLists_) {
+            for (List<Expr> exprs : constExprLists) {
                 output.append(prefix).append("    ").append(exprs.stream().map(Expr::toSql)
                         .collect(Collectors.joining(" | "))).append("\n");
             }
         }
         if (detailLevel == TExplainLevel.VERBOSE) {
-            if (CollectionUtils.isNotEmpty(materializedResultExprLists_)) {
+            if (CollectionUtils.isNotEmpty(materializedResultExprLists)) {
                 output.append(prefix).append("child exprs: ").append("\n");
-                for(List<Expr> exprs : materializedResultExprLists_) {
+                for(List<Expr> exprs : materializedResultExprLists) {
                     output.append(prefix).append("    ").append(exprs.stream().map(Expr::toSql)
                             .collect(Collectors.joining(" | "))).append("\n");
                 }
             }
             List<String> passThroughNodeIds = Lists.newArrayList();
-            for (int i = 0; i < firstMaterializedChildIdx_; ++i) {
+            for (int i = 0; i < firstMaterializedChildIdx; ++i) {
                 passThroughNodeIds.add(children.get(i).getId().toString());
             }
             if (!passThroughNodeIds.isEmpty()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogBuilder.java
index 0b07b5e2e0..59d8bd3c49 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogBuilder.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogBuilder.java
@@ -43,19 +43,19 @@ import java.util.Set;
 public class AuditLogBuilder extends Plugin implements AuditPlugin {
     private static final Logger LOG = LogManager.getLogger(AuditLogBuilder.class);
 
-    private PluginInfo pluginInfo;
+    private final PluginInfo pluginInfo;
 
-    private final String[] LOAD_ANNONATION_NAMES = {"JobId", "Label", "LoadType", "Db", "TableList",
+    private final static String[] LOAD_ANNONATION_NAMES = {"JobId", "Label", "LoadType", "Db", "TableList",
         "FilePathList", "BrokerUser", "Timestamp", "LoadStartTime", "LoadFinishTime", "ScanRows",
         "ScanBytes", "FileNumber"};
 
-    private Set<String> loadAnnotationSet;
+    private final Set<String> loadAnnotationSet;
 
-    private final String[] STREAM_LOAD_ANNONATION_NAMES = {"Label", "Db", "Table", "User", "ClientIp",
+    private final static String[] STREAM_LOAD_ANNONATION_NAMES = {"Label", "Db", "Table", "User", "ClientIp",
             "Status", "Message", "Url", "TotalRows", "LoadedRows", "FilteredRows", "UnselectedRows",
             "LoadBytes", "StartTime", "FinishTime"};
 
-    private Set<String> streamLoadAnnotationSet;
+    private final Set<String> streamLoadAnnotationSet;
 
     public AuditLogBuilder() {
         pluginInfo = new PluginInfo(PluginMgr.BUILTIN_PLUGIN_PREFIX + "AuditLogBuilder", PluginType.AUDIT,
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/QeProcessorImpl.java b/fe/fe-core/src/main/java/org/apache/doris/qe/QeProcessorImpl.java
index 2c8ccc3ee1..c4bff97a6c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/QeProcessorImpl.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/QeProcessorImpl.java
@@ -103,19 +103,20 @@ public final class QeProcessorImpl implements QeProcessor {
                 maxQueryInstances = Config.default_max_query_instances;
             }
             if (maxQueryInstances > 0) {
-                AtomicInteger currentCount = userToInstancesCount.computeIfAbsent(user, __ -> new AtomicInteger(0));
+                AtomicInteger currentCount = userToInstancesCount
+                        .computeIfAbsent(user, ignored -> new AtomicInteger(0));
                 // Many query can reach here.
                 if (instancesNum + currentCount.get() > maxQueryInstances) {
                     throw new UserException("reach max_query_instances " + maxQueryInstances);
                 }
             }
             queryToInstancesNum.put(queryId, instancesNum);
-            userToInstancesCount.computeIfAbsent(user, __ -> new AtomicInteger(0)).addAndGet(instancesNum);
+            userToInstancesCount.computeIfAbsent(user, ignored -> new AtomicInteger(0)).addAndGet(instancesNum);
         }
     }
 
     public Map<String, Integer> getInstancesNumPerUser() {
-        return Maps.transformEntries(userToInstancesCount, (__, value) -> value != null ? value.get() : 0);
+        return Maps.transformEntries(userToInstancesCount, (ignored, value) -> value != null ? value.get() : 0);
     }
 
     @Override
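
The lambda-parameter rename above (__ to ignored) sits in the per-user instance accounting: each registering query bumps an AtomicInteger keyed by user and is rejected once the projected total would exceed max_query_instances. A minimal sketch of that counting pattern, with a hypothetical class and a plain ConcurrentHashMap standing in for the userToInstancesCount field whose declaration is outside this hunk:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.concurrent.atomic.AtomicInteger;

    // Illustrative only; not Doris code.
    class InstanceQuota {
        private final ConcurrentMap<String, AtomicInteger> perUser = new ConcurrentHashMap<>();

        boolean tryRegister(String user, int instancesNum, int maxQueryInstances) {
            if (maxQueryInstances > 0) {
                AtomicInteger current = perUser.computeIfAbsent(user, ignored -> new AtomicInteger(0));
                if (instancesNum + current.get() > maxQueryInstances) {
                    return false; // would exceed the per-user cap
                }
            }
            perUser.computeIfAbsent(user, ignored -> new AtomicInteger(0)).addAndGet(instancesNum);
            return true;
        }
    }
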
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/QueryStateException.java b/fe/fe-core/src/main/java/org/apache/doris/qe/QueryStateException.java
index e4efa4513a..ae46d9bc4d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/QueryStateException.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/QueryStateException.java
@@ -31,7 +31,7 @@ public class QueryStateException extends UserException {
 
     public void createQueryState(MysqlStateType stateType, String msg) {
         this.queryState = new QueryState();
-        switch (stateType) {
+        switch (stateType) { // CHECKSTYLE IGNORE THIS LINE: missing switch default
             case OK:
                 queryState.setOk(0L, 0, msg);
                 break;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
index 56f8659c5b..b4022f6838 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
@@ -718,7 +718,9 @@ public class StmtExecutor implements ProfileWriter {
                 if (LOG.isTraceEnabled()) {
                     LOG.trace("rewrittenStmt: " + parsedStmt.toSql());
                 }
-                if (explainOptions != null) parsedStmt.setIsExplain(explainOptions);
+                if (explainOptions != null) {
+                    parsedStmt.setIsExplain(explainOptions);
+                }
             }
         }
         plannerProfile.setQueryAnalysisFinishTime();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheAnalyzer.java b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheAnalyzer.java
index 99bb658c67..27d30f4a56 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheAnalyzer.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheAnalyzer.java
@@ -152,7 +152,7 @@ public class CacheAnalyzer {
             return Long.compare(table.latestTime, this.latestTime);
         }
 
-        public void Debug() {
+        public void debug() {
             LOG.debug("table {}, partition id {}, ver {}, time {}", olapTable.getName(), latestPartitionId, latestVersion, latestTime);
         }
     }
@@ -213,7 +213,7 @@ public class CacheAnalyzer {
         MetricRepo.COUNTER_QUERY_OLAP_TABLE.increase(1L);
         Collections.sort(tblTimeList);
         latestTable = tblTimeList.get(0);
-        latestTable.Debug();
+        latestTable.debug();
 
         addAllViewStmt(selectStmt);
         String allViewExpandStmtListStr = StringUtils.join(allViewStmtSet, "|");
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheCoordinator.java b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheCoordinator.java
index 4c73d083c4..91f3f93660 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheCoordinator.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheCoordinator.java
@@ -43,7 +43,7 @@ public class CacheCoordinator {
     private static final Logger LOG = LogManager.getLogger(CacheCoordinator.class);
     private static final int VIRTUAL_NODES = 10;
     private static final int REFRESH_NODE_TIME = 300000;
-    public boolean DebugModel = false;
+    public boolean debugModel = false;
     private Hashtable<Long, Backend> realNodes = new Hashtable<>();
     private SortedMap<Long, Backend> virtualNodes = new TreeMap<>();
     private static Lock belock = new ReentrantLock();
@@ -109,7 +109,7 @@ public class CacheCoordinator {
             belock.lock();
             ImmutableMap<Long, Backend> idToBackend = Catalog.getCurrentSystemInfo().getIdToBackend();
             if (idToBackend != null) {
-                if (!DebugModel) {
+                if (!debugModel) {
                     clearBackend(idToBackend);
                 }
                 for (Backend backend : idToBackend.values().asList()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheProxy.java b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheProxy.java
index 63f1ba91c4..20d6f5d0d0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheProxy.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/CacheProxy.java
@@ -47,12 +47,8 @@ public abstract class CacheProxy {
     }
 
     public static CacheProxy getCacheProxy(CacheProxyType type) {
-        switch (type) {
-            case BE:
-                return new CacheBeProxy();
-            case FE:
-            case OUTER:
-                return null;
+        if (CacheProxyType.BE == type) {
+            return new CacheBeProxy();
         }
         return null;
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/PartitionRange.java b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/PartitionRange.java
index 5a4ab07485..d4194f380f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/PartitionRange.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/PartitionRange.java
@@ -117,7 +117,7 @@ public class PartitionRange {
             this.tooNew = false;
         }
 
-        public void Debug() {
+        public void debug() {
             if (partition != null) {
                 LOG.info("partition id {}, cacheKey {}, version {}, time {}, fromCache {}, tooNew {} ",
                         partitionId, cacheKey.realValue(),
@@ -196,6 +196,8 @@ public class PartitionRange {
                     value = expr.getLongValue();
                     keyType = KeyType.LONG;
                     break;
+                default:
+                    return true;
             }
             return true;
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/RowBatchBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/RowBatchBuilder.java
index 5dc82d0e6a..37b91b6f89 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/cache/RowBatchBuilder.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/cache/RowBatchBuilder.java
@@ -166,10 +166,6 @@ public class RowBatchBuilder {
             Long key = entry.getKey();
             PartitionRange.PartitionSingle partition = cachePartMap.get(key);
             partitionRowList = entry.getValue();
-            int data_size = 0;
-            for (byte[] buf : partitionRowList) {
-                data_size += buf.length;
-            }
             updateRequest = updateRequest.toBuilder()
                     .addValues(InternalService.PCacheValue.newBuilder()
                             .setParam(InternalService.PCacheParam.newBuilder()
@@ -177,7 +173,7 @@ public class RowBatchBuilder {
                                     .setLastVersion(partition.getPartition().getVisibleVersion())
                                     .setLastVersionTime(partition.getPartition().getVisibleVersionTime())
                                     .build()).setDataSize(dataSize).addAllRows(
-                                    partitionRowList.stream().map(row -> ByteString.copyFrom(row))
+                                    partitionRowList.stream().map(ByteString::copyFrom)
                                             .collect(Collectors.toList()))).build();
         }
         return updateRequest;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/resource/Tag.java b/fe/fe-core/src/main/java/org/apache/doris/resource/Tag.java
index 88e939183a..cdbb485ff1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/resource/Tag.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/resource/Tag.java
@@ -107,7 +107,9 @@ public class Tag implements Writable {
 
     @Override
     public boolean equals(Object other) {
-        if (other == this) return true;
+        if (other == this) {
+            return true;
+        }
         if (!(other instanceof Tag)) {
             return false;
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/rewrite/BetweenToCompoundRule.java b/fe/fe-core/src/main/java/org/apache/doris/rewrite/BetweenToCompoundRule.java
index 5373a1000d..c73908a10d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/rewrite/BetweenToCompoundRule.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/rewrite/BetweenToCompoundRule.java
@@ -42,7 +42,9 @@ public final class BetweenToCompoundRule implements ExprRewriteRule {
 
     @Override
     public Expr apply(Expr expr, Analyzer analyzer, ExprRewriter.ClauseType clauseType) throws AnalysisException {
-        if (!(expr instanceof BetweenPredicate)) return expr;
+        if (!(expr instanceof BetweenPredicate)) {
+            return expr;
+        }
         BetweenPredicate bp = (BetweenPredicate) expr;
         Expr result = null;
         if (bp.isNotBetween()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/rewrite/CompoundPredicateWriteRule.java b/fe/fe-core/src/main/java/org/apache/doris/rewrite/CompoundPredicateWriteRule.java
index b035e1fd83..1e78dd4710 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/rewrite/CompoundPredicateWriteRule.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/rewrite/CompoundPredicateWriteRule.java
@@ -46,7 +46,9 @@ public class CompoundPredicateWriteRule implements ExprRewriteRule {
     @Override
     public Expr apply(Expr expr, Analyzer analyzer, ExprRewriter.ClauseType clauseType) throws AnalysisException {
 
-        if (!(expr instanceof CompoundPredicate)) return expr;
+        if (!(expr instanceof CompoundPredicate)) {
+            return expr;
+        }
         CompoundPredicate cp = (CompoundPredicate) expr;
 
         List<Expr> children = cp.getChildren();
@@ -66,22 +68,38 @@ public class CompoundPredicateWriteRule implements ExprRewriteRule {
         boolean rightChildFalse = (rightChild instanceof BoolLiteral) && (!((BoolLiteral) rightChild).getValue());
 
         // case true and expr ==> expr
-        if (leftChildTrue && and) return rightChild;
+        if (leftChildTrue && and) {
+            return rightChild;
+        }
         // case expr and true ==> expr
-        if (and && rightChildTrue) return leftChild;
+        if (and && rightChildTrue) {
+            return leftChild;
+        }
         // case false or expr ==> expr
-        if (leftChildFalse && or) return rightChild;
+        if (leftChildFalse && or) {
+            return rightChild;
+        }
         // case expr or false ==> expr
-        if (or && rightChildFalse) return leftChild;
+        if (or && rightChildFalse) {
+            return leftChild;
+        }
 
         // case false and expr ==> false
-        if (leftChildFalse && and) return new BoolLiteral(false);
+        if (leftChildFalse && and) {
+            return new BoolLiteral(false);
+        }
         // case expr and false ==> false
-        if (and && rightChildFalse) return new BoolLiteral(false);
+        if (and && rightChildFalse) {
+            return new BoolLiteral(false);
+        }
         // case true or expr ==> true
-        if (leftChildTrue && or) return new BoolLiteral(true);
+        if (leftChildTrue && or) {
+            return new BoolLiteral(true);
+        }
         // case expr or true ==> true
-        if (or && rightChildTrue) return new BoolLiteral(true);
+        if (or && rightChildTrue) {
+            return new BoolLiteral(true);
+        }
 
         // other case ,return origin expr
         return expr;
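
The eight early returns above are the standard identity and annihilator laws for AND/OR, applied when one child is a boolean literal. A self-contained check of the same table with plain booleans (illustrative only; the rule itself operates on analyzed Expr trees):

    // identity:     TRUE AND x => x        FALSE OR x => x
    // annihilator:  FALSE AND x => FALSE   TRUE OR x => TRUE
    static void checkBooleanLaws() {
        for (boolean x : new boolean[] {true, false}) {
            assert (true && x) == x;        // true and expr  ==> expr
            assert (false || x) == x;       // false or expr  ==> expr
            assert !(false && x);           // false and expr ==> false
            assert (true || x);             // true or expr   ==> true
        }
    }
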
diff --git a/fe/fe-core/src/main/java/org/apache/doris/rewrite/ExprRewriter.java b/fe/fe-core/src/main/java/org/apache/doris/rewrite/ExprRewriter.java
index 29fd5d4e71..9b8ee0d8e0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/rewrite/ExprRewriter.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/rewrite/ExprRewriter.java
@@ -47,8 +47,8 @@ import java.util.Map;
  * Doris match different Rewriter framework execution.
  */
 public class ExprRewriter {
-    private int numChanges_ = 0;
-    private final List<ExprRewriteRule> rules_;
+    private int numChanges = 0;
+    private final List<ExprRewriteRule> rules;
 
     // The type of clause that executes the rule.
     // This type is only used in InferFiltersRule, RewriteDateLiteralRule, other rules are not used
@@ -59,19 +59,19 @@ public class ExprRewriter {
     }
 
     // Once-only Rules
-    private List<ExprRewriteRule> onceRules_ = Lists.newArrayList();
+    private List<ExprRewriteRule> onceRules = Lists.newArrayList();
 
     public ExprRewriter(List<ExprRewriteRule> rules) {
-        rules_ = rules;
+        this.rules = rules;
     }
 
     public ExprRewriter(List<ExprRewriteRule> rules, List<ExprRewriteRule> onceRules) {
-        rules_ = rules;
-        onceRules_ = onceRules;
+        this.rules = rules;
+        this.onceRules = onceRules;
     }
 
     public ExprRewriter(ExprRewriteRule rule) {
-        rules_ = Lists.newArrayList(rule);
+        rules = Lists.newArrayList(rule);
     }
 
     public Expr rewrite(Expr expr, Analyzer analyzer) throws AnalysisException {
@@ -84,17 +84,17 @@ public class ExprRewriter {
         int oldNumChanges;
         Expr rewrittenExpr = expr;
         do {
-            oldNumChanges = numChanges_;
-            for (ExprRewriteRule rule: rules_) {
+            oldNumChanges = numChanges;
+            for (ExprRewriteRule rule: rules) {
                 // when foldConstantByBe is on, fold all constant expr by BE instead of applying FoldConstantsRule in FE.
                 if (rule instanceof FoldConstantsRule && analyzer.safeIsEnableFoldConstantByBe()) {
                     continue;
                 }
                 rewrittenExpr = applyRuleRepeatedly(rewrittenExpr, rule, analyzer, clauseType);
             }
-        } while (oldNumChanges != numChanges_);
+        } while (oldNumChanges != numChanges);
 
-        for (ExprRewriteRule rule: onceRules_) {
+        for (ExprRewriteRule rule: onceRules) {
             rewrittenExpr = applyRuleOnce(rewrittenExpr, rule, analyzer, clauseType);
         }
         return rewrittenExpr;
@@ -103,7 +103,7 @@ public class ExprRewriter {
     private Expr applyRuleOnce(Expr expr, ExprRewriteRule rule, Analyzer analyzer, ClauseType clauseType) throws AnalysisException {
         Expr rewrittenExpr = rule.apply(expr, analyzer, clauseType);
         if (rewrittenExpr != expr) {
-            numChanges_++;
+            numChanges++;
         }
         return rewrittenExpr;
     }
@@ -117,13 +117,13 @@ public class ExprRewriter {
         }
         boolean changed = false;
         // rewrite constant expr
-        for (ExprRewriteRule rule : rules_) {
+        for (ExprRewriteRule rule : rules) {
             if (rule instanceof FoldConstantsRule) {
                 changed = ((FoldConstantsRule) rule).apply(exprMap, analyzer, changed);
             }
         }
         if (changed) {
-            ++numChanges_;
+            ++numChanges;
         }
     }
 
@@ -136,9 +136,9 @@ public class ExprRewriter {
         int oldNumChanges;
         Expr rewrittenExpr = expr;
         do {
-            oldNumChanges = numChanges_;
+            oldNumChanges = numChanges;
             rewrittenExpr = applyRuleBottomUp(rewrittenExpr, rule, analyzer, clauseType);
-        } while (oldNumChanges != numChanges_);
+        } while (oldNumChanges != numChanges);
         return rewrittenExpr;
     }
 
@@ -152,15 +152,19 @@ public class ExprRewriter {
             expr.setChild(i, applyRuleBottomUp(expr.getChild(i), rule, analyzer, clauseType));
         }
         Expr rewrittenExpr = rule.apply(expr, analyzer, clauseType);
-        if (rewrittenExpr != expr) ++numChanges_;
+        if (rewrittenExpr != expr) {
+            ++numChanges;
+        }
         return rewrittenExpr;
     }
 
     public void rewriteList(List<Expr> exprs, Analyzer analyzer) throws AnalysisException {
-        for (int i = 0; i < exprs.size(); ++i) exprs.set(i, rewrite(exprs.get(i), analyzer));
+        for (int i = 0; i < exprs.size(); ++i) {
+            exprs.set(i, rewrite(exprs.get(i), analyzer));
+        }
     }
 
-    public void reset() { numChanges_ = 0; }
-    public boolean changed() { return numChanges_ > 0; }
-    public int getNumChanges() { return numChanges_; }
+    public void reset() { numChanges = 0; }
+    public boolean changed() { return numChanges > 0; }
+    public int getNumChanges() { return numChanges; }
 }
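
The do/while in rewrite() keeps re-running the whole rule list until one full pass leaves numChanges untouched, so rules that enable each other still reach a fixpoint. A minimal, self-contained sketch of the same pattern on strings (illustrative names, not the Doris API; ExprRewriter itself detects change by reference identity rather than equals):

    import java.util.List;
    import java.util.function.UnaryOperator;

    final class FixpointRewriter {
        private int numChanges = 0;

        String rewrite(String input, List<UnaryOperator<String>> rules) {
            String current = input;
            int oldNumChanges;
            do {
                oldNumChanges = numChanges;
                for (UnaryOperator<String> rule : rules) {
                    String rewritten = rule.apply(current);
                    if (!rewritten.equals(current)) {
                        numChanges++;               // count changes, like numChanges above
                    }
                    current = rewritten;
                }
            } while (oldNumChanges != numChanges);  // stop once a full pass changes nothing
            return current;
        }
    }

With a single rule such as s -> s.replace("()", ""), this collapses "((()))" to the empty string over successive passes, which is exactly the behaviour the oldNumChanges/numChanges comparison exists to capture.
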
diff --git a/fe/fe-core/src/main/java/org/apache/doris/rewrite/NormalizeBinaryPredicatesRule.java b/fe/fe-core/src/main/java/org/apache/doris/rewrite/NormalizeBinaryPredicatesRule.java
index 5940961e03..7743928fa6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/rewrite/NormalizeBinaryPredicatesRule.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/rewrite/NormalizeBinaryPredicatesRule.java
@@ -38,9 +38,15 @@ public class NormalizeBinaryPredicatesRule implements ExprRewriteRule {
 
     @Override
     public Expr apply(Expr expr, Analyzer analyzer, ExprRewriter.ClauseType clauseType) throws AnalysisException {
-        if (!(expr instanceof BinaryPredicate)) return expr;
-        if (expr.getChild(0).unwrapSlotRef(false) != null) return expr;
-        if (expr.getChild(1).unwrapSlotRef(false) == null) return expr;
+        if (!(expr instanceof BinaryPredicate)) {
+            return expr;
+        }
+        if (expr.getChild(0).unwrapSlotRef(false) != null) {
+            return expr;
+        }
+        if (expr.getChild(1).unwrapSlotRef(false) == null) {
+            return expr;
+        }
 
         BinaryPredicate.Operator op = ((BinaryPredicate) expr).getOp();
 
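
The three guards above mean the rule only fires when the slot reference sits on the right-hand side; the rewrite (outside this hunk) then swaps the operands and mirrors the comparison operator so the slot ends up on the left, e.g. 2 <= k1 becomes k1 >= 2. A rough sketch of the operator mirroring with an illustrative enum, not the Doris BinaryPredicate.Operator API:

    enum Cmp { EQ, NE, LT, LE, GT, GE }

    // When the two operands swap sides, < and > (and <= and >=) trade places,
    // while = and != are symmetric and stay as they are.
    static Cmp mirror(Cmp op) {
        switch (op) {
            case LT: return Cmp.GT;
            case LE: return Cmp.GE;
            case GT: return Cmp.LT;
            case GE: return Cmp.LE;
            default: return op;
        }
    }
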
diff --git a/fe/fe-core/src/main/java/org/apache/doris/rewrite/RewriteBinaryPredicatesRule.java b/fe/fe-core/src/main/java/org/apache/doris/rewrite/RewriteBinaryPredicatesRule.java
index 95f3ea73bc..de3bae9057 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/rewrite/RewriteBinaryPredicatesRule.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/rewrite/RewriteBinaryPredicatesRule.java
@@ -86,7 +86,9 @@ public class RewriteBinaryPredicatesRule implements ExprRewriteRule {
 
     @Override
     public Expr apply(Expr expr, Analyzer analyzer, ExprRewriter.ClauseType clauseType) throws AnalysisException {
-        if (!(expr instanceof BinaryPredicate)) return expr;
+        if (!(expr instanceof BinaryPredicate)) {
+            return expr;
+        }
         BinaryPredicate.Operator op = ((BinaryPredicate) expr).getOp();
         Expr expr0 = expr.getChild(0);
         Expr expr1 = expr.getChild(1);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/rewrite/RewriteDateLiteralRule.java b/fe/fe-core/src/main/java/org/apache/doris/rewrite/RewriteDateLiteralRule.java
index 2448903fc6..c3eee2f3f6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/rewrite/RewriteDateLiteralRule.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/rewrite/RewriteDateLiteralRule.java
@@ -40,7 +40,9 @@ public class RewriteDateLiteralRule implements ExprRewriteRule {
 
     @Override
     public Expr apply(Expr expr, Analyzer analyzer, ExprRewriter.ClauseType clauseType) throws AnalysisException {
-        if (!(expr instanceof BinaryPredicate)) return expr;
+        if (!(expr instanceof BinaryPredicate)) {
+            return expr;
+        }
         Expr lchild = expr.getChild(0);
         if (!lchild.getType().isDateType()) {
             return expr;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/service/FrontendOptions.java b/fe/fe-core/src/main/java/org/apache/doris/service/FrontendOptions.java
index 857073bb6b..4f864bfc2d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/service/FrontendOptions.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/service/FrontendOptions.java
@@ -100,13 +100,13 @@ public class FrontendOptions {
     }
 
     private static void analyzePriorityCidrs() {
-        String prior_cidrs = Config.priority_networks;
-        if (Strings.isNullOrEmpty(prior_cidrs)) {
+        String priorCidrs = Config.priority_networks;
+        if (Strings.isNullOrEmpty(priorCidrs)) {
             return;
         }
-        LOG.info("configured prior_cidrs value: {}", prior_cidrs);
+        LOG.info("configured prior_cidrs value: {}", priorCidrs);
 
-        String[] cidrList = prior_cidrs.split(PRIORITY_CIDR_SEPARATOR);
+        String[] cidrList = priorCidrs.split(PRIORITY_CIDR_SEPARATOR);
         List<String> priorNetworks = Lists.newArrayList(cidrList);
         for (String cidrStr : priorNetworks) {
             priorityCidrs.add(new CIDR(cidrStr));
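
For context on the renamed local: priority_networks is a separator-delimited list of CIDR blocks (the Doris docs show values such as 10.10.10.0/24;192.168.0.0/16), and analyzePriorityCidrs() turns each entry into a CIDR object used when choosing the FE's bind address. A rough, self-contained sketch of that parsing step with hypothetical names, not the FE classes:

    import java.util.ArrayList;
    import java.util.List;

    static List<String> parsePriorityNetworks(String configValue, String separator) {
        List<String> cidrs = new ArrayList<>();
        if (configValue == null || configValue.isEmpty()) {
            return cidrs;                   // nothing configured; caller falls back to defaults
        }
        for (String cidr : configValue.split(separator)) {
            cidrs.add(cidr.trim());         // e.g. "10.10.10.0/24"
        }
        return cidrs;
    }
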
diff --git a/fe/fe-core/src/main/java/org/apache/doris/statistics/BaseStatsDerive.java b/fe/fe-core/src/main/java/org/apache/doris/statistics/BaseStatsDerive.java
index 0e1301903d..4a8e495895 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/BaseStatsDerive.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/BaseStatsDerive.java
@@ -105,7 +105,9 @@ public class BaseStatsDerive {
         // Collect all estimated selectivities.
         List<Double> selectivities = new ArrayList<>();
         for (Expr e : conjuncts) {
-            if (e.hasSelectivity()) selectivities.add(e.getSelectivity());
+            if (e.hasSelectivity()) {
+                selectivities.add(e.getSelectivity());
+            }
         }
         if (selectivities.size() != conjuncts.size()) {
             // Some conjuncts have no estimated selectivity. Use a single default
diff --git a/fe/fe-core/src/main/java/org/apache/doris/statistics/DeriveFactory.java b/fe/fe-core/src/main/java/org/apache/doris/statistics/DeriveFactory.java
index d663bf5e08..9ff438e7f5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/statistics/DeriveFactory.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/DeriveFactory.java
@@ -23,14 +23,14 @@ public class DeriveFactory {
 
     public BaseStatsDerive getStatsDerive(PlanNode.NodeType nodeType) {
         switch (nodeType) {
+            case OLAP_SCAN_NODE:
+                return new OlapScanStatsDerive();
             case AGG_NODE:
             case HASH_JOIN_NODE:
             case MERGE_NODE:
-                break;
-            case OLAP_SCAN_NODE:
-                return new OlapScanStatsDerive();
             case DEFAULT:
+            default:
+                return new BaseStatsDerive();
         }
-        return new BaseStatsDerive();
     }
 }
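
With the default label added and every arm ending in a return, the switch can no longer complete normally, so a trailing statement after it would be rejected by javac as unreachable; that is why the old return new BaseStatsDerive() below the switch goes away. A minimal generic illustration with assumed names, unrelated to Doris:

    enum Shape { CIRCLE, SQUARE, OTHER }

    static String describe(Shape s) {
        switch (s) {
            case CIRCLE:
                return "round";
            case SQUARE:
            default:
                return "angular";
        }
        // return "never";  // uncommenting this is a compile error: unreachable statement
    }
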
diff --git a/fe/fe-core/src/main/java/org/apache/doris/system/Backend.java b/fe/fe-core/src/main/java/org/apache/doris/system/Backend.java
index 374a6ccee2..1388a6acc8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/system/Backend.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/system/Backend.java
@@ -43,6 +43,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
@@ -569,6 +570,11 @@ public class Backend implements Writable {
         brpcPort = in.readInt();
     }
 
+    @Override
+    public int hashCode() {
+        return Objects.hash(id, host, heartbeatPort, bePort, isAlive);
+    }
+
     @Override
     public boolean equals(Object obj) {
         if (this == obj) {
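
Backend already overrode equals, so adding hashCode restores the equals/hashCode contract and keeps instances usable as HashSet/HashMap keys. A small, self-contained sketch of the same pattern on a hypothetical class (not Doris code):

    import java.util.Objects;

    final class Endpoint {
        private final long id;
        private final String host;
        private final int port;

        Endpoint(long id, String host, int port) {
            this.id = id;
            this.host = host;
            this.port = port;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof Endpoint)) {
                return false;
            }
            Endpoint that = (Endpoint) o;
            return id == that.id && port == that.port && Objects.equals(host, that.host);
        }

        @Override
        public int hashCode() {
            // hash the same fields equals compares, so equal objects share a hash code
            return Objects.hash(id, host, port);
        }
    }
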
diff --git a/fe/fe-core/src/main/java/org/apache/doris/task/UpdateTabletMetaInfoTask.java b/fe/fe-core/src/main/java/org/apache/doris/task/UpdateTabletMetaInfoTask.java
index 97770d0556..d2bfa158cf 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/task/UpdateTabletMetaInfoTask.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/task/UpdateTabletMetaInfoTask.java
@@ -146,6 +146,8 @@ public class UpdateTabletMetaInfoTask extends AgentTask {
                 }
                 break;
             }
+            default:
+                break;
         }
         updateTabletMetaInfoReq.setTabletMetaInfos(metaInfos);
         return updateTabletMetaInfoReq;
diff --git a/fe/fe-core/src/test/java/org/apache/doris/alter/AlterTest.java b/fe/fe-core/src/test/java/org/apache/doris/alter/AlterTest.java
index 0d3a0bb233..869231216e 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/alter/AlterTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/alter/AlterTest.java
@@ -935,35 +935,35 @@ public class AlterTest {
         stmt = "alter table test.odbc_table add column k6 INT KEY after k1, add column k7 TINYINT KEY after k6";
         alterTable(stmt, false);
         Database db = Catalog.getCurrentCatalog().getDbOrMetaException("default_cluster:test");
-        Table odbc_table = db.getTableOrMetaException("odbc_table");
-        Assert.assertEquals(odbc_table.getBaseSchema().size(), 7);
-        Assert.assertEquals(odbc_table.getBaseSchema().get(1).getDataType(), PrimitiveType.INT);
-        Assert.assertEquals(odbc_table.getBaseSchema().get(2).getDataType(), PrimitiveType.TINYINT);
+        Table odbcTable = db.getTableOrMetaException("odbc_table");
+        Assert.assertEquals(odbcTable.getBaseSchema().size(), 7);
+        Assert.assertEquals(odbcTable.getBaseSchema().get(1).getDataType(), PrimitiveType.INT);
+        Assert.assertEquals(odbcTable.getBaseSchema().get(2).getDataType(), PrimitiveType.TINYINT);
 
         // external table support drop column
         stmt = "alter table test.odbc_table drop column k7";
         alterTable(stmt, false);
         db = Catalog.getCurrentCatalog().getDbOrMetaException("default_cluster:test");
-        odbc_table = db.getTableOrMetaException("odbc_table");
-        Assert.assertEquals(odbc_table.getBaseSchema().size(), 6);
+        odbcTable = db.getTableOrMetaException("odbc_table");
+        Assert.assertEquals(odbcTable.getBaseSchema().size(), 6);
 
         // external table support modify column
         stmt = "alter table test.odbc_table modify column k6 bigint after k5";
         alterTable(stmt, false);
         db = Catalog.getCurrentCatalog().getDbOrMetaException("default_cluster:test");
-        odbc_table = db.getTableOrMetaException("odbc_table");
-        Assert.assertEquals(odbc_table.getBaseSchema().size(), 6);
-        Assert.assertEquals(odbc_table.getBaseSchema().get(5).getDataType(), PrimitiveType.BIGINT);
+        odbcTable = db.getTableOrMetaException("odbc_table");
+        Assert.assertEquals(odbcTable.getBaseSchema().size(), 6);
+        Assert.assertEquals(odbcTable.getBaseSchema().get(5).getDataType(), PrimitiveType.BIGINT);
 
         // external table support reorder column
         db = Catalog.getCurrentCatalog().getDbOrMetaException("default_cluster:test");
-        odbc_table = db.getTableOrMetaException("odbc_table");
-        Assert.assertTrue(odbc_table.getBaseSchema().stream().
+        odbcTable = db.getTableOrMetaException("odbc_table");
+        Assert.assertTrue(odbcTable.getBaseSchema().stream().
                 map(column -> column.getName()).
                 reduce("", (totalName, columnName) -> totalName + columnName).equals("k1k2k3k4k5k6"));
         stmt = "alter table test.odbc_table order by (k6, k5, k4, k3, k2, k1)";
         alterTable(stmt, false);
-        Assert.assertTrue(odbc_table.getBaseSchema().stream().
+        Assert.assertTrue(odbcTable.getBaseSchema().stream().
                 map(column -> column.getName()).
                 reduce("", (totalName, columnName) -> totalName + columnName).equals("k6k5k4k3k2k1"));
 
@@ -979,19 +979,19 @@ public class AlterTest {
         stmt = "alter table test.odbc_table drop column k2";
         alterTable(stmt, false);
         // do not allow drop last column
-        Assert.assertEquals(odbc_table.getBaseSchema().size(), 1);
+        Assert.assertEquals(odbcTable.getBaseSchema().size(), 1);
         stmt = "alter table test.odbc_table drop column k1";
         alterTable(stmt, true);
-        Assert.assertEquals(odbc_table.getBaseSchema().size(), 1);
+        Assert.assertEquals(odbcTable.getBaseSchema().size(), 1);
 
         // external table support rename operation
         stmt = "alter table test.odbc_table rename oracle_table";
         alterTable(stmt, false);
         db = Catalog.getCurrentCatalog().getDbOrMetaException("default_cluster:test");
-        odbc_table = db.getTableNullable("oracle_table");
-        Assert.assertNotNull(odbc_table);
-        odbc_table = db.getTableNullable("odbc_table");
-        Assert.assertNull(odbc_table);
+        odbcTable = db.getTableNullable("oracle_table");
+        Assert.assertNotNull(odbcTable);
+        odbcTable = db.getTableNullable("odbc_table");
+        Assert.assertNull(odbcTable);
     }
 
... 2810 lines suppressed ...

