Posted to commits@hudi.apache.org by fo...@apache.org on 2022/07/23 09:18:46 UTC

[hudi] branch master updated: [MINOR] Fix Call Procedure code style (#6186)

This is an automated email from the ASF dual-hosted git repository.

forwardxu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 859157ec01 [MINOR] Fix Call Procedure code style (#6186)
859157ec01 is described below

commit 859157ec01125ee98d2c38bbe767ab7eb0452924
Author: superche <73...@users.noreply.github.com>
AuthorDate: Sat Jul 23 17:18:38 2022 +0800

    [MINOR] Fix Call Procedure code style (#6186)
    
    * Fix Call Procedure code style.
    Co-authored-by: superche <su...@tencent.com>
---
 ...re.scala => CreateMetadataTableProcedure.scala} | 10 +++---
 ...cedure.scala => CreateSavepointProcedure.scala} | 12 ++++----
 .../command/procedures/DeleteMarkerProcedure.scala |  2 +-
 ...re.scala => DeleteMetadataTableProcedure.scala} | 10 +++---
 ...cedure.scala => DeleteSavepointProcedure.scala} | 10 +++---
 .../procedures/ExportInstantsProcedure.scala       |  2 +-
 .../procedures/HdfsParquetImportProcedure.scala    | 14 ++++-----
 .../hudi/command/procedures/HoodieProcedures.scala | 20 ++++++------
 ...dure.scala => InitMetadataTableProcedure.scala} | 12 ++++----
 ...re.scala => RollbackToSavepointProcedure.scala} | 10 +++---
 .../command/procedures/RunBootstrapProcedure.scala | 30 +++++++++---------
 .../command/procedures/RunCleanProcedure.scala     |  8 ++---
 .../procedures/ShowArchivedCommitsProcedure.scala  |  6 ++--
 .../procedures/ShowBootstrapMappingProcedure.scala | 10 +++---
 .../procedures/ShowCommitFilesProcedure.scala      |  2 +-
 .../command/procedures/ShowCommitsProcedure.scala  |  2 +-
 .../procedures/ShowFileSystemViewProcedure.scala   |  2 +-
 ...scala => ShowMetadataTableFilesProcedure.scala} | 21 ++++++++-----
 ... => ShowMetadataTablePartitionsProcedure.scala} | 10 +++---
 ...scala => ShowMetadataTableStatsProcedure.scala} | 10 +++---
 .../procedures/StatsFileSizeProcedure.scala        |  4 +--
 .../procedures/UpgradeOrDowngradeProcedure.scala   |  2 +-
 ...a => ValidateMetadataTableFilesProcedure.scala} | 10 +++---
 .../hudi/procedure/TestBootstrapProcedure.scala    | 12 ++++----
 .../sql/hudi/procedure/TestCallProcedure.scala     |  4 +--
 .../sql/hudi/procedure/TestCleanProcedure.scala    |  2 +-
 .../sql/hudi/procedure/TestCommitsProcedure.scala  |  4 +--
 .../procedure/TestExportInstantsProcedure.scala    |  2 +-
 .../procedure/TestHdfsParquetImportProcedure.scala | 20 ++++++------
 .../sql/hudi/procedure/TestMetadataProcedure.scala | 36 +++++++++++-----------
 .../hudi/procedure/TestSavepointsProcedure.scala   | 18 +++++------
 .../sql/hudi/procedure/TestStatsProcedure.scala    |  6 ++--
 .../TestUpgradeOrDowngradeProcedure.scala          |  6 ++--
 33 files changed, 167 insertions(+), 162 deletions(-)
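
The user-visible effect of the patch is that every Call Procedure name and parameter now uses snake_case (for example, create_savepoints, delete_savepoints and rollback_savepoints become create_savepoint, delete_savepoint and rollback_to_savepoint). A minimal before/after sketch, assuming a SparkSession with the Hudi SQL extensions enabled; table name and instant are placeholders:

    // before this commit
    spark.sql("call create_savepoints(table => 'h0', commit_Time => '20220723000000000')")
    // after this commit
    spark.sql("call create_savepoint(table => 'h0', commit_time => '20220723000000000')")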

diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataCreateProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateMetadataTableProcedure.scala
similarity index 90%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataCreateProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateMetadataTableProcedure.scala
index e20459ea36..3a16d8319a 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataCreateProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateMetadataTableProcedure.scala
@@ -29,7 +29,7 @@ import org.apache.spark.sql.types._
 import java.io.FileNotFoundException
 import java.util.function.Supplier
 
-class MetadataCreateProcedure extends BaseProcedure with ProcedureBuilder with SparkAdapterSupport {
+class CreateMetadataTableProcedure extends BaseProcedure with ProcedureBuilder with SparkAdapterSupport {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None)
   )
@@ -67,14 +67,14 @@ class MetadataCreateProcedure extends BaseProcedure with ProcedureBuilder with S
     Seq(Row("Created Metadata Table in " +  metadataPath + " (duration=" + timer.endTimer / 1000.0 + "secs)"))
   }
 
-  override def build = new MetadataCreateProcedure()
+  override def build = new CreateMetadataTableProcedure()
 }
 
-object MetadataCreateProcedure {
-  val NAME = "metadata_create"
+object CreateMetadataTableProcedure {
+  val NAME = "create_metadata_table"
   var metadataBaseDirectory: Option[String] = None
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get() = new MetadataCreateProcedure()
+    override def get() = new CreateMetadataTableProcedure()
   }
 }
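
The metadata_create procedure is registered as create_metadata_table from this commit on; its single required argument is unchanged. A hedged usage sketch (placeholder table name):

    // old name, prior to this commit
    spark.sql("call metadata_create(table => 'h0')")
    // new name
    spark.sql("call create_metadata_table(table => 'h0')")
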
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateSavepointsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateSavepointProcedure.scala
similarity index 89%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateSavepointsProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateSavepointProcedure.scala
index 43098d1e98..e81b6f086a 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateSavepointsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CreateSavepointProcedure.scala
@@ -26,10 +26,10 @@ import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
 
 import java.util.function.Supplier
 
-class CreateSavepointsProcedure extends BaseProcedure with ProcedureBuilder with Logging {
+class CreateSavepointProcedure extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.required(1, "commit_Time", DataTypes.StringType, None),
+    ProcedureParameter.required(1, "commit_time", DataTypes.StringType, None),
     ProcedureParameter.optional(2, "user", DataTypes.StringType, ""),
     ProcedureParameter.optional(3, "comments", DataTypes.StringType, "")
   )
@@ -75,14 +75,14 @@ class CreateSavepointsProcedure extends BaseProcedure with ProcedureBuilder with
     Seq(Row(result))
   }
 
-  override def build: Procedure = new CreateSavepointsProcedure()
+  override def build: Procedure = new CreateSavepointProcedure()
 }
 
-object CreateSavepointsProcedure {
-  val NAME: String = "create_savepoints"
+object CreateSavepointProcedure {
+  val NAME: String = "create_savepoint"
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get(): CreateSavepointsProcedure = new CreateSavepointsProcedure()
+    override def get(): CreateSavepointProcedure = new CreateSavepointProcedure()
   }
 }
 
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteMarkerProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteMarkerProcedure.scala
index 8804d9fb5f..bfbab32599 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteMarkerProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteMarkerProcedure.scala
@@ -29,7 +29,7 @@ import scala.util.{Failure, Success, Try}
 class DeleteMarkerProcedure extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.required(1, "instant_Time", DataTypes.StringType, None)
+    ProcedureParameter.required(1, "instant_time", DataTypes.StringType, None)
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataDeleteProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteMetadataTableProcedure.scala
similarity index 88%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataDeleteProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteMetadataTableProcedure.scala
index 216a365117..d6fccc1f9d 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataDeleteProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteMetadataTableProcedure.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.types._
 import java.io.FileNotFoundException
 import java.util.function.Supplier
 
-class MetadataDeleteProcedure extends BaseProcedure with ProcedureBuilder with SparkAdapterSupport {
+class DeleteMetadataTableProcedure extends BaseProcedure with ProcedureBuilder with SparkAdapterSupport {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None)
   )
@@ -58,14 +58,14 @@ class MetadataDeleteProcedure extends BaseProcedure with ProcedureBuilder with S
     Seq(Row("Removed Metadata Table from " + metadataPath))
   }
 
-  override def build = new MetadataDeleteProcedure()
+  override def build = new DeleteMetadataTableProcedure()
 }
 
-object MetadataDeleteProcedure {
-  val NAME = "metadata_delete"
+object DeleteMetadataTableProcedure {
+  val NAME = "delete_metadata_table"
   var metadataBaseDirectory: Option[String] = None
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get() = new MetadataDeleteProcedure()
+    override def get() = new DeleteMetadataTableProcedure()
   }
 }
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteSavepointsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteSavepointProcedure.scala
similarity index 90%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteSavepointsProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteSavepointProcedure.scala
index fcef175ebd..1cdd0638f1 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteSavepointsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/DeleteSavepointProcedure.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
 
 import java.util.function.Supplier
 
-class DeleteSavepointsProcedure extends BaseProcedure with ProcedureBuilder with Logging {
+class DeleteSavepointProcedure extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
     ProcedureParameter.required(1, "instant_time", DataTypes.StringType, None)
@@ -74,14 +74,14 @@ class DeleteSavepointsProcedure extends BaseProcedure with ProcedureBuilder with
     Seq(Row(result))
   }
 
-  override def build: Procedure = new DeleteSavepointsProcedure()
+  override def build: Procedure = new DeleteSavepointProcedure()
 }
 
-object DeleteSavepointsProcedure {
-  val NAME: String = "delete_savepoints"
+object DeleteSavepointProcedure {
+  val NAME: String = "delete_savepoint"
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get(): DeleteSavepointsProcedure = new DeleteSavepointsProcedure()
+    override def get(): DeleteSavepointProcedure = new DeleteSavepointProcedure()
   }
 }
 
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
index ff6ab92179..cf400dd6d5 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
@@ -49,7 +49,7 @@ class ExportInstantsProcedure extends BaseProcedure with ProcedureBuilder with L
 
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.required(1, "localFolder", DataTypes.StringType, None),
+    ProcedureParameter.required(1, "local_folder", DataTypes.StringType, None),
     ProcedureParameter.optional(2, "limit", DataTypes.IntegerType, -1),
     ProcedureParameter.optional(3, "actions", DataTypes.StringType, defaultActions),
     ProcedureParameter.optional(4, "desc", DataTypes.BooleanType, false)
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HdfsParquetImportProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HdfsParquetImportProcedure.scala
index 1589d230ce..ad947f745a 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HdfsParquetImportProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HdfsParquetImportProcedure.scala
@@ -28,17 +28,17 @@ import scala.language.higherKinds
 class HdfsParquetImportProcedure extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.required(1, "tableType", DataTypes.StringType, None),
-    ProcedureParameter.required(2, "srcPath", DataTypes.StringType, None),
-    ProcedureParameter.required(3, "targetPath", DataTypes.StringType, None),
-    ProcedureParameter.required(4, "rowKey", DataTypes.StringType, None),
-    ProcedureParameter.required(5, "partitionKey", DataTypes.StringType, None),
-    ProcedureParameter.required(6, "schemaFilePath", DataTypes.StringType, None),
+    ProcedureParameter.required(1, "table_type", DataTypes.StringType, None),
+    ProcedureParameter.required(2, "src_path", DataTypes.StringType, None),
+    ProcedureParameter.required(3, "target_path", DataTypes.StringType, None),
+    ProcedureParameter.required(4, "row_key", DataTypes.StringType, None),
+    ProcedureParameter.required(5, "partition_key", DataTypes.StringType, None),
+    ProcedureParameter.required(6, "schema_file_path", DataTypes.StringType, None),
     ProcedureParameter.optional(7, "format", DataTypes.StringType, "parquet"),
     ProcedureParameter.optional(8, "command", DataTypes.StringType, "insert"),
     ProcedureParameter.optional(9, "retry", DataTypes.IntegerType, 0),
     ProcedureParameter.optional(10, "parallelism", DataTypes.IntegerType, jsc.defaultParallelism),
-    ProcedureParameter.optional(11, "propsFilePath", DataTypes.StringType, "")
+    ProcedureParameter.optional(11, "props_file_path", DataTypes.StringType, "")
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HoodieProcedures.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HoodieProcedures.scala
index 1a9404d265..6f6435865e 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HoodieProcedures.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HoodieProcedures.scala
@@ -35,9 +35,9 @@ object HoodieProcedures {
     val mapBuilder: ImmutableMap.Builder[String, Supplier[ProcedureBuilder]] = ImmutableMap.builder()
     mapBuilder.put(RunCompactionProcedure.NAME, RunCompactionProcedure.builder)
     mapBuilder.put(ShowCompactionProcedure.NAME, ShowCompactionProcedure.builder)
-    mapBuilder.put(CreateSavepointsProcedure.NAME, CreateSavepointsProcedure.builder)
-    mapBuilder.put(DeleteSavepointsProcedure.NAME, DeleteSavepointsProcedure.builder)
-    mapBuilder.put(RollbackSavepointsProcedure.NAME, RollbackSavepointsProcedure.builder)
+    mapBuilder.put(CreateSavepointProcedure.NAME, CreateSavepointProcedure.builder)
+    mapBuilder.put(DeleteSavepointProcedure.NAME, DeleteSavepointProcedure.builder)
+    mapBuilder.put(RollbackToSavepointProcedure.NAME, RollbackToSavepointProcedure.builder)
     mapBuilder.put(RollbackToInstantTimeProcedure.NAME, RollbackToInstantTimeProcedure.builder)
     mapBuilder.put(RunClusteringProcedure.NAME, RunClusteringProcedure.builder)
     mapBuilder.put(ShowClusteringProcedure.NAME, ShowClusteringProcedure.builder)
@@ -66,13 +66,13 @@ object HoodieProcedures {
     mapBuilder.put(ShowBootstrapPartitionsProcedure.NAME, ShowBootstrapPartitionsProcedure.builder)
     mapBuilder.put(UpgradeTableProcedure.NAME, UpgradeTableProcedure.builder)
     mapBuilder.put(DowngradeTableProcedure.NAME, DowngradeTableProcedure.builder)
-    mapBuilder.put(ListMetadataFilesProcedure.NAME, ListMetadataFilesProcedure.builder)
-    mapBuilder.put(ListMetadataPartitionsProcedure.NAME, ListMetadataPartitionsProcedure.builder)
-    mapBuilder.put(MetadataCreateProcedure.NAME, MetadataCreateProcedure.builder)
-    mapBuilder.put(MetadataDeleteProcedure.NAME, MetadataDeleteProcedure.builder)
-    mapBuilder.put(MetadataInitProcedure.NAME, MetadataInitProcedure.builder)
-    mapBuilder.put(ShowMetadataStatsProcedure.NAME, ShowMetadataStatsProcedure.builder)
-    mapBuilder.put(ValidateMetadataFilesProcedure.NAME, ValidateMetadataFilesProcedure.builder)
+    mapBuilder.put(ShowMetadataTableFilesProcedure.NAME, ShowMetadataTableFilesProcedure.builder)
+    mapBuilder.put(ShowMetadataTablePartitionsProcedure.NAME, ShowMetadataTablePartitionsProcedure.builder)
+    mapBuilder.put(CreateMetadataTableProcedure.NAME, CreateMetadataTableProcedure.builder)
+    mapBuilder.put(DeleteMetadataTableProcedure.NAME, DeleteMetadataTableProcedure.builder)
+    mapBuilder.put(InitMetadataTableProcedure.NAME, InitMetadataTableProcedure.builder)
+    mapBuilder.put(ShowMetadataTableStatsProcedure.NAME, ShowMetadataTableStatsProcedure.builder)
+    mapBuilder.put(ValidateMetadataTableFilesProcedure.NAME, ValidateMetadataTableFilesProcedure.builder)
     mapBuilder.put(ShowFsPathDetailProcedure.NAME, ShowFsPathDetailProcedure.builder)
     mapBuilder.put(CopyToTableProcedure.NAME, CopyToTableProcedure.builder)
     mapBuilder.put(RepairAddpartitionmetaProcedure.NAME, RepairAddpartitionmetaProcedure.builder)
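
HoodieProcedures resolves a CALL statement purely by the NAME string registered in this map, so once these entries are updated the old spellings no longer resolve. A hedged sketch of the user-visible effect (placeholder table name):

    // resolves after this commit
    spark.sql("call show_metadata_table_partitions(table => 'h0')")
    // the old registration has been removed, so this would no longer resolve:
    // spark.sql("call list_metadata_partitions(table => 'h0')")
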
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataInitProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/InitMetadataTableProcedure.scala
similarity index 89%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataInitProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/InitMetadataTableProcedure.scala
index acd1532a97..73d1128a98 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/MetadataInitProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/InitMetadataTableProcedure.scala
@@ -30,10 +30,10 @@ import org.apache.spark.sql.types._
 import java.io.FileNotFoundException
 import java.util.function.Supplier
 
-class MetadataInitProcedure extends BaseProcedure with ProcedureBuilder with SparkAdapterSupport with Logging {
+class InitMetadataTableProcedure extends BaseProcedure with ProcedureBuilder with SparkAdapterSupport with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.optional(1, "readOnly", DataTypes.BooleanType, false)
+    ProcedureParameter.optional(1, "read_only", DataTypes.BooleanType, false)
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
@@ -71,14 +71,14 @@ class MetadataInitProcedure extends BaseProcedure with ProcedureBuilder with Spa
     Seq(Row(action + " Metadata Table in " + metadataPath + " (duration=" + timer.endTimer / 1000.0 + "sec)"))
   }
 
-  override def build = new MetadataInitProcedure()
+  override def build = new InitMetadataTableProcedure()
 }
 
-object MetadataInitProcedure {
-  val NAME = "metadata_init"
+object InitMetadataTableProcedure {
+  val NAME = "init_metadata_table"
   var metadataBaseDirectory: Option[String] = None
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get() = new MetadataInitProcedure()
+    override def get() = new InitMetadataTableProcedure()
   }
 }
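
metadata_init becomes init_metadata_table, and its readOnly flag becomes read_only. A call sketch with a placeholder table name:

    spark.sql("call init_metadata_table(table => 'h0')")
    // optionally initialize in read-only mode
    spark.sql("call init_metadata_table(table => 'h0', read_only => true)")
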
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackSavepointsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToSavepointProcedure.scala
similarity index 90%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackSavepointsProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToSavepointProcedure.scala
index fa8f33d306..11f06d4a7c 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackSavepointsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToSavepointProcedure.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
 
 import java.util.function.Supplier
 
-class RollbackSavepointsProcedure extends BaseProcedure with ProcedureBuilder with Logging {
+class RollbackToSavepointProcedure extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
     ProcedureParameter.required(1, "instant_time", DataTypes.StringType, None)
@@ -74,14 +74,14 @@ class RollbackSavepointsProcedure extends BaseProcedure with ProcedureBuilder wi
     Seq(Row(result))
   }
 
-  override def build: Procedure = new RollbackSavepointsProcedure()
+  override def build: Procedure = new RollbackToSavepointProcedure()
 }
 
-object RollbackSavepointsProcedure {
-  val NAME: String = "rollback_savepoints"
+object RollbackToSavepointProcedure {
+  val NAME: String = "rollback_to_savepoint"
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get(): RollbackSavepointsProcedure = new RollbackSavepointsProcedure()
+    override def get(): RollbackToSavepointProcedure = new RollbackToSavepointProcedure()
   }
 }
 
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RunBootstrapProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RunBootstrapProcedure.scala
index 8e6fd36a8f..de64650bfd 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RunBootstrapProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RunBootstrapProcedure.scala
@@ -37,22 +37,22 @@ import java.util.function.Supplier
 class RunBootstrapProcedure extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.required(1, "tableType", DataTypes.StringType, None),
-    ProcedureParameter.required(2, "bootstrapPath", DataTypes.StringType, None),
-    ProcedureParameter.required(3, "basePath", DataTypes.StringType, None),
-    ProcedureParameter.required(4, "rowKeyField", DataTypes.StringType, None),
-    ProcedureParameter.optional(5, "baseFileFormat", DataTypes.StringType, "PARQUET"),
-    ProcedureParameter.optional(6, "partitionPathField", DataTypes.StringType, ""),
-    ProcedureParameter.optional(7, "bootstrapIndexClass", DataTypes.StringType, "org.apache.hudi.common.bootstrap.index.HFileBootstrapIndex"),
-    ProcedureParameter.optional(8, "selectorClass", DataTypes.StringType, "org.apache.hudi.client.bootstrap.selector.MetadataOnlyBootstrapModeSelector"),
-    ProcedureParameter.optional(9, "keyGeneratorClass", DataTypes.StringType, "org.apache.hudi.keygen.SimpleKeyGenerator"),
-    ProcedureParameter.optional(10, "fullBootstrapInputProvider", DataTypes.StringType, "org.apache.hudi.bootstrap.SparkParquetBootstrapDataProvider"),
-    ProcedureParameter.optional(11, "schemaProviderClass", DataTypes.StringType, ""),
-    ProcedureParameter.optional(12, "payloadClass", DataTypes.StringType, "org.apache.hudi.common.model.OverwriteWithLatestAvroPayload"),
+    ProcedureParameter.required(1, "table_type", DataTypes.StringType, None),
+    ProcedureParameter.required(2, "bootstrap_path", DataTypes.StringType, None),
+    ProcedureParameter.required(3, "base_path", DataTypes.StringType, None),
+    ProcedureParameter.required(4, "row_key_field", DataTypes.StringType, None),
+    ProcedureParameter.optional(5, "base_file_format", DataTypes.StringType, "PARQUET"),
+    ProcedureParameter.optional(6, "partition_path_field", DataTypes.StringType, ""),
+    ProcedureParameter.optional(7, "bootstrap_index_class", DataTypes.StringType, "org.apache.hudi.common.bootstrap.index.HFileBootstrapIndex"),
+    ProcedureParameter.optional(8, "selector_class", DataTypes.StringType, "org.apache.hudi.client.bootstrap.selector.MetadataOnlyBootstrapModeSelector"),
+    ProcedureParameter.optional(9, "key_generator_class", DataTypes.StringType, "org.apache.hudi.keygen.SimpleKeyGenerator"),
+    ProcedureParameter.optional(10, "full_bootstrap_input_provider", DataTypes.StringType, "org.apache.hudi.bootstrap.SparkParquetBootstrapDataProvider"),
+    ProcedureParameter.optional(11, "schema_provider_class", DataTypes.StringType, ""),
+    ProcedureParameter.optional(12, "payload_class", DataTypes.StringType, "org.apache.hudi.common.model.OverwriteWithLatestAvroPayload"),
     ProcedureParameter.optional(13, "parallelism", DataTypes.IntegerType, 1500),
-    ProcedureParameter.optional(14, "enableHiveSync", DataTypes.BooleanType, false),
-    ProcedureParameter.optional(15, "propsFilePath", DataTypes.StringType, ""),
-    ProcedureParameter.optional(16, "bootstrapOverwrite", DataTypes.BooleanType, false)
+    ProcedureParameter.optional(14, "enable_hive_sync", DataTypes.BooleanType, false),
+    ProcedureParameter.optional(15, "props_file_path", DataTypes.StringType, ""),
+    ProcedureParameter.optional(16, "bootstrap_overwrite", DataTypes.BooleanType, false)
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RunCleanProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RunCleanProcedure.scala
index 6e3d2e9dcb..b5d942d5e6 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RunCleanProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RunCleanProcedure.scala
@@ -30,10 +30,10 @@ class RunCleanProcedure extends BaseProcedure with ProcedureBuilder with Logging
 
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.optional(1, "skipLocking", DataTypes.BooleanType, false),
-    ProcedureParameter.optional(2, "scheduleInLine", DataTypes.BooleanType, true),
-    ProcedureParameter.optional(3, "cleanPolicy", DataTypes.StringType, None),
-    ProcedureParameter.optional(4, "retainCommits", DataTypes.IntegerType, 10)
+    ProcedureParameter.optional(1, "skip_locking", DataTypes.BooleanType, false),
+    ProcedureParameter.optional(2, "schedule_in_line", DataTypes.BooleanType, true),
+    ProcedureParameter.optional(3, "clean_policy", DataTypes.StringType, None),
+    ProcedureParameter.optional(4, "retain_commits", DataTypes.IntegerType, 10)
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
index 12bc85f040..957dfbe8bf 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
@@ -36,8 +36,8 @@ class ShowArchivedCommitsProcedure(includeExtraMetadata: Boolean) extends BasePr
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
     ProcedureParameter.optional(1, "limit", DataTypes.IntegerType, 10),
-    ProcedureParameter.optional(2, "startTs", DataTypes.StringType, ""),
-    ProcedureParameter.optional(3, "endTs", DataTypes.StringType, "")
+    ProcedureParameter.optional(2, "start_ts", DataTypes.StringType, ""),
+    ProcedureParameter.optional(3, "end_ts", DataTypes.StringType, "")
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
@@ -63,7 +63,7 @@ class ShowArchivedCommitsProcedure(includeExtraMetadata: Boolean) extends BasePr
     StructField("num_update_writes", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_errors", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_log_blocks", DataTypes.LongType, nullable = true, Metadata.empty),
-    StructField("total_corrupt_logblocks", DataTypes.LongType, nullable = true, Metadata.empty),
+    StructField("total_corrupt_log_blocks", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_rollback_blocks", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_log_records", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_updated_records_compacted", DataTypes.LongType, nullable = true, Metadata.empty),
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowBootstrapMappingProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowBootstrapMappingProcedure.scala
index dab3891686..33070554f2 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowBootstrapMappingProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowBootstrapMappingProcedure.scala
@@ -33,17 +33,17 @@ import scala.collection.JavaConverters._
 class ShowBootstrapMappingProcedure extends BaseProcedure with ProcedureBuilder {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.optional(1, "partitionPath", DataTypes.StringType, ""),
-    ProcedureParameter.optional(2, "fileIds", DataTypes.StringType, ""),
+    ProcedureParameter.optional(1, "partition_path", DataTypes.StringType, ""),
+    ProcedureParameter.optional(2, "file_ids", DataTypes.StringType, ""),
     ProcedureParameter.optional(3, "limit", DataTypes.IntegerType, 10),
-    ProcedureParameter.optional(4, "sortBy", DataTypes.StringType, "partition"),
+    ProcedureParameter.optional(4, "sort_by", DataTypes.StringType, "partition"),
     ProcedureParameter.optional(5, "desc", DataTypes.BooleanType, false)
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
     StructField("partition", DataTypes.StringType, nullable = true, Metadata.empty),
-    StructField("fileid", DataTypes.StringType, nullable = true, Metadata.empty),
-    StructField("source_basepath", DataTypes.StringType, nullable = true, Metadata.empty),
+    StructField("file_id", DataTypes.StringType, nullable = true, Metadata.empty),
+    StructField("source_base_path", DataTypes.StringType, nullable = true, Metadata.empty),
     StructField("source_partition", DataTypes.StringType, nullable = true, Metadata.empty),
     StructField("source_file", DataTypes.StringType, nullable = true, Metadata.empty))
   )
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
index 9fea4a18fe..d4581be7f4 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
@@ -44,7 +44,7 @@ class ShowCommitFilesProcedure() extends BaseProcedure with ProcedureBuilder {
     StructField("file_id", DataTypes.StringType, nullable = true, Metadata.empty),
     StructField("previous_commit", DataTypes.StringType, nullable = true, Metadata.empty),
     StructField("total_records_updated", DataTypes.LongType, nullable = true, Metadata.empty),
-    StructField("total_tecords_written", DataTypes.LongType, nullable = true, Metadata.empty),
+    StructField("total_records_written", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_bytes_written", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_errors", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("file_size", DataTypes.LongType, nullable = true, Metadata.empty)
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
index 920e3a2c16..1dc395ad27 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
@@ -61,7 +61,7 @@ class ShowCommitsProcedure(includeExtraMetadata: Boolean) extends BaseProcedure
     StructField("num_update_writes", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_errors", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_log_blocks", DataTypes.LongType, nullable = true, Metadata.empty),
-    StructField("total_corrupt_logblocks", DataTypes.LongType, nullable = true, Metadata.empty),
+    StructField("total_corrupt_log_blocks", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_rollback_blocks", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_log_records", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("total_updated_records_compacted", DataTypes.LongType, nullable = true, Metadata.empty),
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowFileSystemViewProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowFileSystemViewProcedure.scala
index 8c861cf0f6..1b67f26794 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowFileSystemViewProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowFileSystemViewProcedure.scala
@@ -37,7 +37,7 @@ class ShowFileSystemViewProcedure(showLatest: Boolean) extends BaseProcedure wit
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
     ProcedureParameter.optional(1, "max_instant", DataTypes.StringType, ""),
     ProcedureParameter.optional(2, "include_max", DataTypes.BooleanType, false),
-    ProcedureParameter.optional(3, "include_inflight", DataTypes.BooleanType, false),
+    ProcedureParameter.optional(3, "include_in_flight", DataTypes.BooleanType, false),
     ProcedureParameter.optional(4, "exclude_compaction", DataTypes.BooleanType, false),
     ProcedureParameter.optional(5, "limit", DataTypes.IntegerType, 10),
     ProcedureParameter.optional(6, "path_regex", DataTypes.StringType, "*/*/*")
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ListMetadataFilesProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTableFilesProcedure.scala
similarity index 83%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ListMetadataFilesProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTableFilesProcedure.scala
index 591293e08d..b30203dc06 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ListMetadataFilesProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTableFilesProcedure.scala
@@ -21,7 +21,7 @@ import org.apache.hadoop.fs.{FileStatus, Path}
 import org.apache.hudi.common.config.HoodieMetadataConfig
 import org.apache.hudi.common.engine.HoodieLocalEngineContext
 import org.apache.hudi.common.table.HoodieTableMetaClient
-import org.apache.hudi.common.util.HoodieTimer
+import org.apache.hudi.common.util.{HoodieTimer, StringUtils}
 import org.apache.hudi.exception.HoodieException
 import org.apache.hudi.metadata.HoodieBackedTableMetadata
 import org.apache.spark.internal.Logging
@@ -31,10 +31,10 @@ import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
 import java.util
 import java.util.function.Supplier
 
-class ListMetadataFilesProcedure() extends BaseProcedure with ProcedureBuilder with Logging {
+class ShowMetadataTableFilesProcedure() extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.optional(1, "partition", DataTypes.StringType, None)
+    ProcedureParameter.optional(1, "partition", DataTypes.StringType, "")
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
@@ -60,8 +60,13 @@ class ListMetadataFilesProcedure() extends BaseProcedure with ProcedureBuilder w
       throw new HoodieException(s"Metadata Table not enabled/initialized.")
     }
 
+    var partitionPath = new Path(basePath)
+    if (!StringUtils.isNullOrEmpty(partition)) {
+      partitionPath = new Path(basePath, partition)
+    }
+
     val timer = new HoodieTimer().startTimer
-    val statuses = metaReader.getAllFilesInPartition(new Path(basePath, partition))
+    val statuses = metaReader.getAllFilesInPartition(partitionPath)
     logDebug("Took " + timer.endTimer + " ms")
 
     val rows = new util.ArrayList[Row]
@@ -71,13 +76,13 @@ class ListMetadataFilesProcedure() extends BaseProcedure with ProcedureBuilder w
     rows.stream().toArray().map(r => r.asInstanceOf[Row]).toList
   }
 
-  override def build: Procedure = new ListMetadataFilesProcedure()
+  override def build: Procedure = new ShowMetadataTableFilesProcedure()
 }
 
-object ListMetadataFilesProcedure {
-  val NAME = "list_metadata_files"
+object ShowMetadataTableFilesProcedure {
+  val NAME = "show_metadata_table_files"
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get() = new ListMetadataFilesProcedure()
+    override def get() = new ShowMetadataTableFilesProcedure()
   }
 }
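
Beyond the rename from list_metadata_files to show_metadata_table_files, the partition argument now defaults to an empty string, and an empty value lists files under the table base path instead of requiring a partition. A usage sketch (placeholder table and partition values):

    // files tracked by the metadata table at the base path
    spark.sql("call show_metadata_table_files(table => 'h0')")
    // files of a single partition
    spark.sql("call show_metadata_table_files(table => 'h0', partition => '2022/07/23')")
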
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ListMetadataPartitionsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTablePartitionsProcedure.scala
similarity index 89%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ListMetadataPartitionsProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTablePartitionsProcedure.scala
index 4c0bf15d90..f2eaa7ad83 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ListMetadataPartitionsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTablePartitionsProcedure.scala
@@ -31,7 +31,7 @@ import java.util.Collections
 import java.util.function.Supplier
 import scala.collection.JavaConverters.asScalaIteratorConverter
 
-class ListMetadataPartitionsProcedure() extends BaseProcedure with ProcedureBuilder with Logging {
+class ShowMetadataTablePartitionsProcedure() extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None)
   )
@@ -69,13 +69,13 @@ class ListMetadataPartitionsProcedure() extends BaseProcedure with ProcedureBuil
     rows.stream().toArray().map(r => r.asInstanceOf[Row]).toList
   }
 
-  override def build: Procedure = new ListMetadataPartitionsProcedure()
+  override def build: Procedure = new ShowMetadataTablePartitionsProcedure()
 }
 
-object ListMetadataPartitionsProcedure {
-  val NAME = "list_metadata_partitions"
+object ShowMetadataTablePartitionsProcedure {
+  val NAME = "show_metadata_table_partitions"
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get() = new ListMetadataPartitionsProcedure()
+    override def get() = new ShowMetadataTablePartitionsProcedure()
   }
 }
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataStatsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTableStatsProcedure.scala
similarity index 89%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataStatsProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTableStatsProcedure.scala
index 9a73a51fd1..948e441858 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataStatsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowMetadataTableStatsProcedure.scala
@@ -28,7 +28,7 @@ import java.util
 import java.util.function.Supplier
 import scala.collection.JavaConversions._
 
-class ShowMetadataStatsProcedure() extends BaseProcedure with ProcedureBuilder {
+class ShowMetadataTableStatsProcedure() extends BaseProcedure with ProcedureBuilder {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None)
   )
@@ -61,14 +61,14 @@ class ShowMetadataStatsProcedure() extends BaseProcedure with ProcedureBuilder {
     rows.stream().toArray().map(r => r.asInstanceOf[Row]).toList
   }
 
-  override def build: Procedure = new ShowMetadataStatsProcedure()
+  override def build: Procedure = new ShowMetadataTableStatsProcedure()
 }
 
-object ShowMetadataStatsProcedure {
-  val NAME = "show_metadata_stats"
+object ShowMetadataTableStatsProcedure {
+  val NAME = "show_metadata_table_stats"
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get() = new ShowMetadataStatsProcedure()
+    override def get() = new ShowMetadataTableStatsProcedure()
   }
 }
 
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/StatsFileSizeProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/StatsFileSizeProcedure.scala
index e0ece6e086..fcd529cbab 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/StatsFileSizeProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/StatsFileSizeProcedure.scala
@@ -46,7 +46,7 @@ class StatsFileSizeProcedure extends BaseProcedure with ProcedureBuilder {
     StructField("95th", DataTypes.DoubleType, nullable = true, Metadata.empty),
     StructField("max", DataTypes.LongType, nullable = true, Metadata.empty),
     StructField("num_files", DataTypes.IntegerType, nullable = true, Metadata.empty),
-    StructField("stddev", DataTypes.DoubleType, nullable = true, Metadata.empty)
+    StructField("std_dev", DataTypes.DoubleType, nullable = true, Metadata.empty)
   ))
 
   override def call(args: ProcedureArgs): Seq[Row] = {
@@ -100,7 +100,7 @@ class StatsFileSizeProcedure extends BaseProcedure with ProcedureBuilder {
 
 object StatsFileSizeProcedure {
   val MAX_FILES = 1000000
-  val NAME = "stats_filesizes"
+  val NAME = "stats_file_sizes"
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
     override def get(): ProcedureBuilder = new StatsFileSizeProcedure()
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/UpgradeOrDowngradeProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/UpgradeOrDowngradeProcedure.scala
index 9eeef164a1..49cbe5e2de 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/UpgradeOrDowngradeProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/UpgradeOrDowngradeProcedure.scala
@@ -35,7 +35,7 @@ import scala.util.{Failure, Success, Try}
 class UpgradeOrDowngradeProcedure extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
-    ProcedureParameter.required(1, "toVersion", DataTypes.StringType, None)
+    ProcedureParameter.required(1, "to_version", DataTypes.StringType, None)
   )
 
   private val OUTPUT_TYPE = new StructType(Array[StructField](
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ValidateMetadataFilesProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ValidateMetadataTableFilesProcedure.scala
similarity index 95%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ValidateMetadataFilesProcedure.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ValidateMetadataTableFilesProcedure.scala
index b3c125942a..81540d9684 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ValidateMetadataFilesProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ValidateMetadataTableFilesProcedure.scala
@@ -34,7 +34,7 @@ import java.util.function.Supplier
 import scala.collection.JavaConversions._
 import scala.collection.JavaConverters.asScalaIteratorConverter
 
-class ValidateMetadataFilesProcedure() extends BaseProcedure with ProcedureBuilder with Logging {
+class ValidateMetadataTableFilesProcedure() extends BaseProcedure with ProcedureBuilder with Logging {
   private val PARAMETERS = Array[ProcedureParameter](
     ProcedureParameter.required(0, "table", DataTypes.StringType, None),
     ProcedureParameter.optional(1, "verbose", DataTypes.BooleanType, false)
@@ -135,13 +135,13 @@ class ValidateMetadataFilesProcedure() extends BaseProcedure with ProcedureBuild
     rows.stream().toArray().map(r => r.asInstanceOf[Row]).toList
   }
 
-  override def build: Procedure = new ValidateMetadataFilesProcedure()
+  override def build: Procedure = new ValidateMetadataTableFilesProcedure()
 }
 
-object ValidateMetadataFilesProcedure {
-  val NAME = "validate_metadata_files"
+object ValidateMetadataTableFilesProcedure {
+  val NAME = "validate_metadata_table_files"
 
   def builder: Supplier[ProcedureBuilder] = new Supplier[ProcedureBuilder] {
-    override def get() = new ValidateMetadataFilesProcedure()
+    override def get() = new ValidateMetadataTableFilesProcedure()
   }
 }
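
validate_metadata_files is now validate_metadata_table_files; it still takes a required table and an optional verbose flag. A call sketch with a placeholder table name:

    spark.sql("call validate_metadata_table_files(table => 'h0')")
    // optional verbose output
    spark.sql("call validate_metadata_table_files(table => 'h0', verbose => true)")
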
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
index 931d313013..f1e15a88c2 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
@@ -55,12 +55,12 @@ class TestBootstrapProcedure extends HoodieSparkSqlTestBase {
       checkAnswer(
         s"""call run_bootstrap(
            |table => '$tableName',
-           |basePath => '$tablePath',
-           |tableType => '${HoodieTableType.COPY_ON_WRITE.name}',
-           |bootstrapPath => '$sourcePath',
-           |rowKeyField => '$RECORD_KEY_FIELD',
-           |partitionPathField => '$PARTITION_FIELD',
-           |bootstrapOverwrite => true)""".stripMargin) {
+           |base_path => '$tablePath',
+           |table_type => '${HoodieTableType.COPY_ON_WRITE.name}',
+           |bootstrap_path => '$sourcePath',
+           |row_key_field => '$RECORD_KEY_FIELD',
+           |partition_path_field => '$PARTITION_FIELD',
+           |bootstrap_overwrite => true)""".stripMargin) {
         Seq(0)
       }
 
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
index 02b0e930fb..3bd7b01673 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
@@ -156,7 +156,7 @@ class TestCallProcedure extends HoodieSparkSqlTestBase {
 
       // Check required fields
       checkExceptionContain(s"""call delete_marker(table => '$tableName')""")(
-        s"Argument: instant_Time is required")
+        s"Argument: instant_time is required")
 
       val instantTime = "101"
       FileCreateUtils.createMarkerFile(tablePath, "", instantTime, "f0", IOType.APPEND)
@@ -164,7 +164,7 @@ class TestCallProcedure extends HoodieSparkSqlTestBase {
         FileCreateUtils.getTotalMarkerFileCount(tablePath, "", instantTime, IOType.APPEND)
       }
 
-      checkAnswer(s"""call delete_marker(table => '$tableName', instant_Time => '$instantTime')""")(Seq(true))
+      checkAnswer(s"""call delete_marker(table => '$tableName', instant_time => '$instantTime')""")(Seq(true))
 
       assertResult(0) {
         FileCreateUtils.getTotalMarkerFileCount(tablePath, "", instantTime, IOType.APPEND)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
index e0d61cbb07..316dccca52 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
@@ -48,7 +48,7 @@ class TestCleanProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"update $tableName set price = 12 where id = 1")
       spark.sql(s"update $tableName set price = 13 where id = 1")
 
-      val result1 = spark.sql(s"call run_clean(table => '$tableName', retainCommits => 1)")
+      val result1 = spark.sql(s"call run_clean(table => '$tableName', retain_commits => 1)")
         .collect()
         .map(row => Seq(row.getString(0), row.getLong(1), row.getInt(2), row.getString(3), row.getString(4), row.getInt(5)))
 
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
index 750b3943ad..2539ff7c36 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
@@ -62,7 +62,7 @@ class TestCommitsProcedure extends HoodieSparkSqlTestBase {
 
       // collect archived commits for table
       val endTs = commits(0).get(0).toString
-      val archivedCommits = spark.sql(s"""call show_archived_commits(table => '$tableName', endTs => '$endTs')""").collect()
+      val archivedCommits = spark.sql(s"""call show_archived_commits(table => '$tableName', end_ts => '$endTs')""").collect()
       assertResult(4) {
         archivedCommits.length
       }
@@ -110,7 +110,7 @@ class TestCommitsProcedure extends HoodieSparkSqlTestBase {
 
       // collect archived commits for table
       val endTs = commits(0).get(0).toString
-      val archivedCommits = spark.sql(s"""call show_archived_commits_metadata(table => '$tableName', endTs => '$endTs')""").collect()
+      val archivedCommits = spark.sql(s"""call show_archived_commits_metadata(table => '$tableName', end_ts => '$endTs')""").collect()
       assertResult(4) {
         archivedCommits.length
       }
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
index b234126930..cd4e3a7ac6 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
@@ -43,7 +43,7 @@ class TestExportInstantsProcedure extends HoodieSparkSqlTestBase {
       // insert data to table
       spark.sql(s"insert into $tableName select 1, 'a1', 10, 1000")
 
-      val result = spark.sql(s"""call export_instants(table => '$tableName', localFolder => '${tmp.getCanonicalPath}/$tableName')""").limit(1).collect()
+      val result = spark.sql(s"""call export_instants(table => '$tableName', local_folder => '${tmp.getCanonicalPath}/$tableName')""").limit(1).collect()
       assertResult(1) {
         result.length
       }
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
index 90e6164c10..1a4d3e2e91 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
@@ -56,15 +56,15 @@ class TestHdfsParquetImportProcedure extends HoodieSparkSqlTestBase {
       val insertData: util.List[GenericRecord] = createInsertRecords(sourcePath)
 
       // Check required fields
-      checkExceptionContain(s"""call hdfs_parquet_import(tableType => 'mor')""")(
+      checkExceptionContain(s"""call hdfs_parquet_import(table_type => 'mor')""")(
         s"Argument: table is required")
 
       checkAnswer(
         s"""call hdfs_parquet_import(
-           |table => '$tableName', tableType => '${HoodieTableType.COPY_ON_WRITE.name}',
-           |srcPath => '$sourcePath', targetPath => '$targetPath',
-           |rowKey => '_row_key', partitionKey => 'timestamp',
-           |schemaFilePath => '$schemaFile')""".stripMargin) {
+           |table => '$tableName', table_type => '${HoodieTableType.COPY_ON_WRITE.name}',
+           |src_path => '$sourcePath', target_path => '$targetPath',
+           |row_key => '_row_key', partition_key => 'timestamp',
+           |schema_file_path => '$schemaFile')""".stripMargin) {
         Seq(0)
       }
 
@@ -89,15 +89,15 @@ class TestHdfsParquetImportProcedure extends HoodieSparkSqlTestBase {
       val insertData: util.List[GenericRecord] = createUpsertRecords(sourcePath)
 
       // Check required fields
-      checkExceptionContain(s"""call hdfs_parquet_import(tableType => 'mor')""")(
+      checkExceptionContain(s"""call hdfs_parquet_import(table_type => 'mor')""")(
         s"Argument: table is required")
 
       checkAnswer(
         s"""call hdfs_parquet_import(
-           |table => '$tableName', tableType => '${HoodieTableType.COPY_ON_WRITE.name}',
-           |srcPath => '$sourcePath', targetPath => '$targetPath',
-           |rowKey => '_row_key', partitionKey => 'timestamp',
-           |schemaFilePath => '$schemaFile', command => 'upsert')""".stripMargin) {
+           |table => '$tableName', table_type => '${HoodieTableType.COPY_ON_WRITE.name}',
+           |src_path => '$sourcePath', target_path => '$targetPath',
+           |row_key => '_row_key', partition_key => 'timestamp',
+           |schema_file_path => '$schemaFile', command => 'upsert')""".stripMargin) {
         Seq(0)
       }
 
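
All hdfs_parquet_import arguments are snake_case now (table_type, src_path, target_path, row_key, partition_key, schema_file_path). A sketch with placeholder paths and schema file:

    // Every argument renamed in this patch, shown together; paths are placeholders.
    val imported = spark.sql(
      """call hdfs_parquet_import(
        |table => 'hudi_trips', table_type => 'COPY_ON_WRITE',
        |src_path => '/tmp/source_parquet', target_path => '/tmp/hudi_trips',
        |row_key => '_row_key', partition_key => 'timestamp',
        |schema_file_path => '/tmp/source.avsc', command => 'upsert')""".stripMargin).collect()
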
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
index 9dbb8f22ec..5a26aaa0cf 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 class TestMetadataProcedure extends HoodieSparkSqlTestBase {
 
-  test("Test Call metadata_delete Procedure") {
+  test("Test Call delete_metadata_table Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -44,14 +44,14 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
 
       // delete the metadata
-      val deleteResult = spark.sql(s"""call metadata_delete(table => '$tableName')""").collect()
+      val deleteResult = spark.sql(s"""call delete_metadata_table(table => '$tableName')""").collect()
       assertResult(1) {
         deleteResult.length
       }
     }
   }
 
-  test("Test Call metadata_create Procedure") {
+  test("Test Call create_metadata_table Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -74,20 +74,20 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
 
       // The first step is to delete the metadata
-      val deleteResult = spark.sql(s"""call metadata_delete(table => '$tableName')""").collect()
+      val deleteResult = spark.sql(s"""call delete_metadata_table(table => '$tableName')""").collect()
       assertResult(1) {
         deleteResult.length
       }
 
       // The second step is to create the metadata
-      val createResult = spark.sql(s"""call metadata_create(table => '$tableName')""").collect()
+      val createResult = spark.sql(s"""call create_metadata_table(table => '$tableName')""").collect()
       assertResult(1) {
         createResult.length
       }
     }
   }
 
-  test("Test Call metadata_init Procedure") {
+  test("Test Call init_metadata_table Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -110,20 +110,20 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
 
       // read only, no initialization
-      val readResult = spark.sql(s"""call metadata_init(table => '$tableName', readOnly => true)""").collect()
+      val readResult = spark.sql(s"""call init_metadata_table(table => '$tableName', read_only => true)""").collect()
       assertResult(1) {
         readResult.length
       }
 
       // initialize metadata
-      val initResult = spark.sql(s"""call metadata_init(table => '$tableName')""").collect()
+      val initResult = spark.sql(s"""call init_metadata_table(table => '$tableName')""").collect()
       assertResult(1) {
         initResult.length
       }
     }
   }
 
-  test("Test Call show_metadata_stats Procedure") {
+  test("Test Call show_metadata_table_stats Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -147,14 +147,14 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
 
       // collect metadata stats for table
-      val metadataStats = spark.sql(s"""call show_metadata_stats(table => '$tableName')""").collect()
+      val metadataStats = spark.sql(s"""call show_metadata_table_stats(table => '$tableName')""").collect()
       assertResult(0) {
         metadataStats.length
       }
     }
   }
 
-  test("Test Call list_metadata_partitions Procedure") {
+  test("Test Call show_metadata_table_partitions Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -178,14 +178,14 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
 
       // collect metadata partitions for table
-      val partitions = spark.sql(s"""call list_metadata_partitions(table => '$tableName')""").collect()
+      val partitions = spark.sql(s"""call show_metadata_table_partitions(table => '$tableName')""").collect()
       assertResult(2) {
         partitions.length
       }
     }
   }
 
-  test("Test Call list_metadata_files Procedure") {
+  test("Test Call show_metadata_table_files Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -209,21 +209,21 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
 
       // collect metadata partitions for table
-      val partitions = spark.sql(s"""call list_metadata_partitions(table => '$tableName')""").collect()
+      val partitions = spark.sql(s"""call show_metadata_table_partitions(table => '$tableName')""").collect()
       assertResult(2) {
         partitions.length
       }
 
       // collect metadata files for a partition of a table
       val partition = partitions(0).get(0).toString
-      val filesResult = spark.sql(s"""call list_metadata_files(table => '$tableName', partition => '$partition')""").collect()
+      val filesResult = spark.sql(s"""call show_metadata_table_files(table => '$tableName', partition => '$partition')""").collect()
       assertResult(1) {
         filesResult.length
       }
     }
   }
 
-  test("Test Call validate_metadata_files Procedure") {
+  test("Test Call validate_metadata_table_files Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -247,13 +247,13 @@ class TestMetadataProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
 
       // collect validate metadata files result
-      val validateFilesResult = spark.sql(s"""call validate_metadata_files(table => '$tableName')""").collect()
+      val validateFilesResult = spark.sql(s"""call validate_metadata_table_files(table => '$tableName')""").collect()
       assertResult(0) {
         validateFilesResult.length
       }
 
       // collect validate metadata files result with verbose
-      val validateFilesVerboseResult = spark.sql(s"""call validate_metadata_files(table => '$tableName', verbose => true)""").collect()
+      val validateFilesVerboseResult = spark.sql(s"""call validate_metadata_table_files(table => '$tableName', verbose => true)""").collect()
       assertResult(2) {
         validateFilesVerboseResult.length
       }
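
Taken together, the metadata-table procedures exercised in this file map to the following calls; a sketch reusing the placeholder session and table from above, with an illustrative partition value.

    // metadata_delete/metadata_create/metadata_init are now *_metadata_table,
    // list_* and show_metadata_stats are now show_metadata_table_*,
    // validate_metadata_files is now validate_metadata_table_files,
    // and readOnly is now read_only.
    spark.sql("call delete_metadata_table(table => 'hudi_trips')").collect()
    spark.sql("call create_metadata_table(table => 'hudi_trips')").collect()
    spark.sql("call init_metadata_table(table => 'hudi_trips', read_only => true)").collect()
    spark.sql("call show_metadata_table_stats(table => 'hudi_trips')").collect()
    spark.sql("call show_metadata_table_partitions(table => 'hudi_trips')").collect()
    // The partition value below is a placeholder.
    spark.sql("call show_metadata_table_files(table => 'hudi_trips', partition => 'dt=2021-01-05')").collect()
    spark.sql("call validate_metadata_table_files(table => 'hudi_trips', verbose => true)").collect()
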
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
index cfc5319c75..24036519cd 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
 
-  test("Test Call create_savepoints Procedure") {
+  test("Test Call create_savepoint Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -49,7 +49,7 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
       }
 
       val commitTime = commits.apply(0).getString(0)
-      checkAnswer(s"""call create_savepoints('$tableName', '$commitTime', 'admin', '1')""")(Seq(true))
+      checkAnswer(s"""call create_savepoint('$tableName', '$commitTime', 'admin', '1')""")(Seq(true))
     }
   }
 
@@ -83,7 +83,7 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
       }
 
       val commitTime = commits.apply(1).getString(0)
-      checkAnswer(s"""call create_savepoints('$tableName', '$commitTime')""")(Seq(true))
+      checkAnswer(s"""call create_savepoint('$tableName', '$commitTime')""")(Seq(true))
 
       // show savepoints
       val savepoints = spark.sql(s"""call show_savepoints(table => '$tableName')""").collect()
@@ -93,7 +93,7 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
     }
   }
 
-  test("Test Call delete_savepoints Procedure") {
+  test("Test Call delete_savepoint Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -124,11 +124,11 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
 
       // create 3 savepoints
       commits.foreach(r => {
-        checkAnswer(s"""call create_savepoints('$tableName', '${r.getString(0)}')""")(Seq(true))
+        checkAnswer(s"""call create_savepoint('$tableName', '${r.getString(0)}')""")(Seq(true))
       })
 
       // delete savepoints
-      checkAnswer(s"""call delete_savepoints('$tableName', '${commits.apply(1).getString(0)}')""")(Seq(true))
+      checkAnswer(s"""call delete_savepoint('$tableName', '${commits.apply(1).getString(0)}')""")(Seq(true))
 
       // show savepoints with only 2
       val savepoints = spark.sql(s"""call show_savepoints(table => '$tableName')""").collect()
@@ -138,7 +138,7 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
     }
   }
 
-  test("Test Call rollback_savepoints Procedure") {
+  test("Test Call rollback_to_savepoint Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       // create table
@@ -168,11 +168,11 @@ class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
 
       // create 2 savepoints
       commits.foreach(r => {
-        checkAnswer(s"""call create_savepoints('$tableName', '${r.getString(0)}')""")(Seq(true))
+        checkAnswer(s"""call create_savepoint('$tableName', '${r.getString(0)}')""")(Seq(true))
       })
 
       // rollback savepoints
-      checkAnswer(s"""call rollback_savepoints('$tableName', '${commits.apply(0).getString(0)}')""")(Seq(true))
+      checkAnswer(s"""call rollback_to_savepoint('$tableName', '${commits.apply(0).getString(0)}')""")(Seq(true))
     }
   }
 }
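
The savepoint procedures are singular now, and the rollback procedure is rollback_to_savepoint. A sketch with a placeholder instant time:

    // create_savepoints/delete_savepoints/rollback_savepoints are now
    // create_savepoint/delete_savepoint/rollback_to_savepoint.
    spark.sql("call create_savepoint('hudi_trips', '20220723091800000', 'admin', '1')").collect()
    spark.sql("call show_savepoints(table => 'hudi_trips')").collect()
    spark.sql("call delete_savepoint('hudi_trips', '20220723091800000')").collect()
    spark.sql("call rollback_to_savepoint('hudi_trips', '20220723091800000')").collect()
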
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
index 2da5392e9b..ad0179b58b 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
@@ -60,7 +60,7 @@ class TestStatsProcedure extends HoodieSparkSqlTestBase {
     }
   }
 
-  test("Test Call stats_filesizes Procedure") {
+  test("Test Call stats_file_sizes Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
       val tablePath = s"${tmp.getCanonicalPath}/$tableName"
@@ -85,12 +85,12 @@ class TestStatsProcedure extends HoodieSparkSqlTestBase {
       spark.sql(s"insert into $tableName select 2, 'a2', 20, 1500")
 
       // Check required fields
-      checkExceptionContain(s"""call stats_filesizes(limit => 10)""")(
+      checkExceptionContain(s"""call stats_file_sizes(limit => 10)""")(
         s"Argument: table is required")
 
       // collect result for table
       val result = spark.sql(
-        s"""call stats_filesizes(table => '$tableName', partition_path => '/*')""".stripMargin).collect()
+        s"""call stats_file_sizes(table => '$tableName', partition_path => '/*')""".stripMargin).collect()
       assertResult(3) {
         result.length
       }
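
stats_filesizes is renamed to stats_file_sizes; the arguments are unchanged. A sketch using the same partition glob as the tests above:

    val sizes = spark.sql(
      "call stats_file_sizes(table => 'hudi_trips', partition_path => '/*')").collect()
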
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
index 55c184ab56..a9a763c8fd 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
@@ -48,7 +48,7 @@ class TestUpgradeOrDowngradeProcedure extends HoodieSparkSqlTestBase {
        """.stripMargin)
       // Check required fields
       checkExceptionContain(s"""call downgrade_table(table => '$tableName')""")(
-        s"Argument: toVersion is required")
+        s"Argument: to_version is required")
 
       var metaClient = HoodieTableMetaClient.builder
         .setConf(new JavaSparkContext(spark.sparkContext).hadoopConfiguration())
@@ -62,7 +62,7 @@ class TestUpgradeOrDowngradeProcedure extends HoodieSparkSqlTestBase {
       assertTableVersionFromPropertyFile(metaClient, HoodieTableVersion.FOUR.versionCode)
 
       // downgrade table to ZERO
-      checkAnswer(s"""call downgrade_table(table => '$tableName', toVersion => 'ZERO')""")(Seq(true))
+      checkAnswer(s"""call downgrade_table(table => '$tableName', to_version => 'ZERO')""")(Seq(true))
 
       // verify the downgraded hoodie.table.version
       metaClient = HoodieTableMetaClient.reload(metaClient)
@@ -72,7 +72,7 @@ class TestUpgradeOrDowngradeProcedure extends HoodieSparkSqlTestBase {
       assertTableVersionFromPropertyFile(metaClient, HoodieTableVersion.ZERO.versionCode)
 
       // upgrade table to ONE
-      checkAnswer(s"""call upgrade_table(table => '$tableName', toVersion => 'ONE')""")(Seq(true))
+      checkAnswer(s"""call upgrade_table(table => '$tableName', to_version => 'ONE')""")(Seq(true))
 
       // verify the upgraded hoodie.table.version
       metaClient = HoodieTableMetaClient.reload(metaClient)