Posted to commits@ozone.apache.org by el...@apache.org on 2020/08/27 08:43:47 UTC

[hadoop-ozone] branch master updated: HDDS-4056. Convert OzoneAdmin to pluggable model (#1285)

This is an automated email from the ASF dual-hosted git repository.

elek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new bc7786a  HDDS-4056. Convert OzoneAdmin to pluggable model (#1285)
bc7786a is described below

commit bc7786a2fafb2d36923506f8de6c25fcfd26d55b
Author: Doroszlai, Attila <64...@users.noreply.github.com>
AuthorDate: Thu Aug 27 10:43:39 2020 +0200

    HDDS-4056. Convert OzoneAdmin to pluggable model (#1285)
---
 .../org/apache/hadoop/hdds/cli/package-info.java   |   4 +-
 hadoop-hdds/tools/pom.xml                          |   8 ++
 .../org/apache/hadoop/hdds/cli/OzoneAdmin.java     |  67 +++++++++++
 .../WithScmClient.java => cli/package-info.java}   |  19 +---
 .../hdds/scm/cli/ReplicationManagerCommands.java   |  23 ++--
 .../scm/cli/ReplicationManagerStartSubcommand.java |  21 ++--
 .../cli/ReplicationManagerStatusSubcommand.java    |  32 ++----
 .../scm/cli/ReplicationManagerStopSubcommand.java  |  25 ++---
 .../hdds/scm/cli/SafeModeCheckSubcommand.java      |  40 +++----
 .../hadoop/hdds/scm/cli/SafeModeCommands.java      |  27 ++---
 .../hdds/scm/cli/SafeModeExitSubcommand.java       |  22 ++--
 .../hdds/scm/cli/SafeModeWaitSubcommand.java       |  13 +--
 .../org/apache/hadoop/hdds/scm/cli/ScmOption.java  |  72 ++++++++++++
 .../WithScmClient.java => ScmSubcommand.java}      |  24 +++-
 .../hadoop/hdds/scm/cli/TopologySubcommand.java    |  65 ++++++-----
 .../hdds/scm/cli/container/CloseSubcommand.java    |  20 ++--
 .../hdds/scm/cli/container/ContainerCommands.java  |  21 ++--
 .../hdds/scm/cli/container/CreateSubcommand.java   |  26 ++---
 .../hdds/scm/cli/container/DeleteSubcommand.java   |  20 ++--
 .../hdds/scm/cli/container/InfoSubcommand.java     |  40 +++----
 .../hdds/scm/cli/container/ListSubcommand.java     |  32 ++----
 .../hdds/scm/cli/datanode/DatanodeCommands.java    |  21 ++--
 .../hdds/scm/cli/datanode/ListInfoSubcommand.java  |  48 ++++----
 .../cli/pipeline/ActivatePipelineSubcommand.java   |  19 ++--
 .../scm/cli/pipeline/ClosePipelineSubcommand.java  |  19 ++--
 .../scm/cli/pipeline/CreatePipelineSubcommand.java |  38 +++----
 .../cli/pipeline/DeactivatePipelineSubcommand.java |  19 ++--
 .../scm/cli/pipeline/ListPipelinesSubcommand.java  |  40 +++----
 .../hdds/scm/cli/pipeline/PipelineCommands.java    |  22 ++--
 .../admincli/{datanode.robot => admin.robot}       |  20 ++--
 .../src/main/smoketest/admincli/container.robot    |  68 ++++++++++++
 .../src/main/smoketest/admincli/datanode.robot     |  19 ++--
 .../src/main/smoketest/admincli/pipeline.robot     |  49 +++++++--
 .../smoketest/admincli/replicationmanager.robot    |  53 +++++++++
 .../src/main/smoketest/admincli/safemode.robot     |  45 ++++++++
 hadoop-ozone/dist/src/shell/ozone/ozone            |   2 +-
 .../hadoop/ozone/shell/TestOzoneDatanodeShell.java |   2 +-
 hadoop-ozone/tools/pom.xml                         |   2 -
 .../org/apache/hadoop/ozone/admin/OzoneAdmin.java  | 122 ---------------------
 .../org/apache/hadoop/ozone/admin/om/OMAdmin.java  |   2 +-
 .../TestGenerateOzoneRequiredConfigurations.java   |   5 +-
 pom.xml                                            |   8 +-
 42 files changed, 686 insertions(+), 558 deletions(-)
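
For context on the pattern this change introduces: instead of reaching the SCM client through picocli's @ParentCommand, each top-level admin command now registers itself as a service and declares which CLI it plugs into. A minimal sketch of such a plugin, modelled on the ReplicationManagerCommands hunk further down (the SubcommandWithParent interface itself is not part of this diff, so its exact shape is an assumption):

    import java.util.concurrent.Callable;

    import org.apache.hadoop.hdds.cli.GenericCli;
    import org.apache.hadoop.hdds.cli.OzoneAdmin;
    import org.apache.hadoop.hdds.cli.SubcommandWithParent;

    import org.kohsuke.MetaInfServices;
    import picocli.CommandLine.Command;
    import picocli.CommandLine.Model.CommandSpec;
    import picocli.CommandLine.Spec;

    /**
     * Hypothetical plugin following the pattern used in the hunks below; the
     * metainf-services annotation processor (newly added to
     * hadoop-hdds/tools/pom.xml) emits the META-INF/services entry for
     * SubcommandWithParent at compile time.
     */
    @Command(name = "example",
        description = "Example pluggable admin command",
        mixinStandardHelpOptions = true)
    @MetaInfServices(SubcommandWithParent.class)
    public class ExampleCommands implements Callable<Void>, SubcommandWithParent {

      @Spec
      private CommandSpec spec;

      @Override
      public Void call() throws Exception {
        // No subcommand given: print usage, same as the converted commands below.
        GenericCli.missingSubcommand(spec);
        return null;
      }

      @Override
      public Class<?> getParentType() {
        return OzoneAdmin.class;   // attach under 'ozone admin'
      }
    }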

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/package-info.java
index 8dcc1d1..aabad6f 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/package-info.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/package-info.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,4 +19,4 @@
 /**
  * Generic helper class to make instantiate picocli based cli tools.
  */
-package org.apache.hadoop.hdds.cli;
\ No newline at end of file
+package org.apache.hadoop.hdds.cli;
diff --git a/hadoop-hdds/tools/pom.xml b/hadoop-hdds/tools/pom.xml
index f362a0b..fcc553f 100644
--- a/hadoop-hdds/tools/pom.xml
+++ b/hadoop-hdds/tools/pom.xml
@@ -67,6 +67,14 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
       <artifactId>commons-cli</artifactId>
     </dependency>
     <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.kohsuke.metainf-services</groupId>
+      <artifactId>metainf-services</artifactId>
+    </dependency>
+    <dependency>
       <groupId>org.xerial</groupId>
       <artifactId>sqlite-jdbc</artifactId>
     </dependency>
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
new file mode 100644
index 0000000..aca8a4c
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.cli;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.util.NativeCodeLoader;
+
+import org.apache.log4j.ConsoleAppender;
+import org.apache.log4j.Level;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
+import picocli.CommandLine;
+
+/**
+ * Ozone Admin Command line tool.
+ */
+@CommandLine.Command(name = "ozone admin",
+    hidden = true,
+    description = "Developer tools for Ozone Admin operations",
+    versionProvider = HddsVersionProvider.class,
+    mixinStandardHelpOptions = true)
+public class OzoneAdmin extends GenericCli {
+
+  private OzoneConfiguration ozoneConf;
+
+  public OzoneAdmin() {
+    super(OzoneAdmin.class);
+  }
+
+  public OzoneConfiguration getOzoneConf() {
+    if (ozoneConf == null) {
+      ozoneConf = createOzoneConfiguration();
+    }
+    return ozoneConf;
+  }
+
+  /**
+   * Main for the Ozone Admin shell Command handling.
+   *
+   * @param argv - System Args Strings[]
+   */
+  public static void main(String[] argv) {
+    LogManager.resetConfiguration();
+    Logger.getRootLogger().setLevel(Level.INFO);
+    Logger.getRootLogger()
+        .addAppender(new ConsoleAppender(new PatternLayout("%m%n")));
+    Logger.getLogger(NativeCodeLoader.class).setLevel(Level.ERROR);
+
+    new OzoneAdmin().run(argv);
+  }
+}
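
The loading side is not touched by this diff: @MetaInfServices writes a META-INF/services registration at compile time, and the parent CLI is expected to pick registered implementations up from there. A hedged sketch of what that consumer could look like (the helper name and the exact wiring inside GenericCli/OzoneAdmin are assumptions):

    import java.util.ServiceLoader;

    import org.apache.hadoop.hdds.cli.SubcommandWithParent;
    import picocli.CommandLine;

    /**
     * Hedged sketch of the discovery step (not shown in this commit): iterate
     * the service registrations and attach each subcommand whose declared
     * parent matches the CLI being built.
     */
    public final class PluggableSubcommands {

      private PluggableSubcommands() {
      }

      static void addPluggableSubcommands(CommandLine cli, Class<?> parentType) {
        for (SubcommandWithParent sub
            : ServiceLoader.load(SubcommandWithParent.class)) {
          if (parentType.equals(sub.getParentType())) {
            // picocli 4.x derives the subcommand name from its @Command annotation.
            cli.addSubcommand(sub);
          }
        }
      }
    }
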
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/WithScmClient.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/package-info.java
similarity index 71%
copy from hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/WithScmClient.java
copy to hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/package-info.java
index 9852d50..82fbd72 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/WithScmClient.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/package-info.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,24 +6,17 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.scm.cli.container;
-
-import org.apache.hadoop.hdds.scm.client.ScmClient;
 
 /**
- * Command which provides a SCM client based on the current config.
+ * Command-line tools for HDDS.
  */
-public interface WithScmClient {
-
-  ScmClient createScmClient();
-
-}
+package org.apache.hadoop.hdds.cli;
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerCommands.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerCommands.java
index fcb9ad6..cd5aba3 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerCommands.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerCommands.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,11 +21,12 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.GenericCli;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
-import org.apache.hadoop.hdds.scm.cli.container.WithScmClient;
+import org.apache.hadoop.hdds.cli.OzoneAdmin;
+import org.apache.hadoop.hdds.cli.SubcommandWithParent;
 
+import org.kohsuke.MetaInfServices;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Model.CommandSpec;
-import picocli.CommandLine.ParentCommand;
 import picocli.CommandLine.Spec;
 
 /**
@@ -41,21 +42,21 @@ import picocli.CommandLine.Spec;
         ReplicationManagerStopSubcommand.class,
         ReplicationManagerStatusSubcommand.class
     })
-public class ReplicationManagerCommands implements Callable<Void> {
+@MetaInfServices(SubcommandWithParent.class)
+public class ReplicationManagerCommands implements Callable<Void>,
+    SubcommandWithParent {
 
   @Spec
   private CommandSpec spec;
 
-  @ParentCommand
-  private WithScmClient parent;
-
-  public WithScmClient getParent() {
-    return parent;
-  }
-
   @Override
   public Void call() throws Exception {
     GenericCli.missingSubcommand(spec);
     return null;
   }
+
+  @Override
+  public Class<?> getParentType() {
+    return OzoneAdmin.class;
+  }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
index 1adec6b..ff82b82 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,32 +22,25 @@ import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
-import picocli.CommandLine.ParentCommand;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 /**
- * This is the handler that process safe mode check command.
+ * Handler to start replication manager.
  */
 @Command(
     name = "start",
     description = "Start ReplicationManager",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class ReplicationManagerStartSubcommand implements Callable<Void> {
+public class ReplicationManagerStartSubcommand extends ScmSubcommand {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(ReplicationManagerStartSubcommand.class);
 
-  @ParentCommand
-  private ReplicationManagerCommands parent;
-
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      scmClient.startReplicationManager();
-      LOG.info("Starting ReplicationManager...");
-      return null;
-    }
+  public void execute(ScmClient scmClient) throws IOException {
+    scmClient.startReplicationManager();
+    LOG.info("Starting ReplicationManager...");
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
index 2ebf28c..c6800be 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,39 +22,31 @@ import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
-import picocli.CommandLine.ParentCommand;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 /**
- * This is the handler that process safe mode check command.
+ * Handler to query status of replication manager.
  */
 @Command(
     name = "status",
     description = "Check if ReplicationManager is running or not",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class ReplicationManagerStatusSubcommand implements Callable<Void> {
+public class ReplicationManagerStatusSubcommand extends ScmSubcommand {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(ReplicationManagerStatusSubcommand.class);
 
-  @ParentCommand
-  private ReplicationManagerCommands parent;
-
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-
-      boolean execReturn = scmClient.getReplicationManagerStatus();
-
-      // Output data list
-      if(execReturn){
-        LOG.info("ReplicationManager is Running.");
-      } else {
-        LOG.info("ReplicationManager is Not Running.");
-      }
-      return null;
+  public void execute(ScmClient scmClient) throws IOException {
+    boolean execReturn = scmClient.getReplicationManagerStatus();
+
+    // Output data list
+    if(execReturn){
+      LOG.info("ReplicationManager is Running.");
+    } else {
+      LOG.info("ReplicationManager is Not Running.");
     }
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
index 7cafd01..7d3063a 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,34 +22,27 @@ import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
-import picocli.CommandLine.ParentCommand;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 /**
- * This is the handler that process safe mode check command.
+ * Handler to stop replication manager.
  */
 @Command(
     name = "stop",
     description = "Stop ReplicationManager",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class ReplicationManagerStopSubcommand implements Callable<Void> {
+public class ReplicationManagerStopSubcommand extends ScmSubcommand {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(ReplicationManagerStopSubcommand.class);
 
-  @ParentCommand
-  private ReplicationManagerCommands parent;
-
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      scmClient.stopReplicationManager();
-      LOG.info("Stopping ReplicationManager...");
-      LOG.info("Requested SCM to stop ReplicationManager, " +
-          "it might take sometime for the ReplicationManager to stop.");
-      return null;
-    }
+  public void execute(ScmClient scmClient) throws IOException {
+    scmClient.stopReplicationManager();
+    LOG.info("Stopping ReplicationManager...");
+    LOG.info("Requested SCM to stop ReplicationManager, " +
+        "it might take sometime for the ReplicationManager to stop.");
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
index b2cfea3..ba359af 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hdds.scm.cli;
 
+import java.io.IOException;
 import java.util.Map;
-import java.util.concurrent.Callable;
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
@@ -28,7 +28,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import picocli.CommandLine;
 import picocli.CommandLine.Command;
-import picocli.CommandLine.ParentCommand;
 
 /**
  * This is the handler that process safe mode check command.
@@ -38,39 +37,32 @@ import picocli.CommandLine.ParentCommand;
     description = "Check if SCM is in safe mode",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class SafeModeCheckSubcommand implements Callable<Void> {
+public class SafeModeCheckSubcommand extends ScmSubcommand {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(SafeModeCheckSubcommand.class);
 
-  @ParentCommand
-  private SafeModeCommands parent;
-
   @CommandLine.Option(names = {"--verbose"},
       description = "Show detailed status of rules.")
   private boolean verbose;
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-
-      boolean execReturn = scmClient.inSafeMode();
+  public void execute(ScmClient scmClient) throws IOException {
+    boolean execReturn = scmClient.inSafeMode();
 
-      // Output data list
-      if(execReturn){
-        LOG.info("SCM is in safe mode.");
-        if (verbose) {
-          for (Map.Entry<String, Pair<Boolean, String>> entry :
-              scmClient.getSafeModeRuleStatuses().entrySet()) {
-            Pair<Boolean, String> value = entry.getValue();
-            LOG.info("validated:{}, {}, {}",
-                value.getLeft(), entry.getKey(), value.getRight());
-          }
+    // Output data list
+    if(execReturn){
+      LOG.info("SCM is in safe mode.");
+      if (verbose) {
+        for (Map.Entry<String, Pair<Boolean, String>> entry :
+            scmClient.getSafeModeRuleStatuses().entrySet()) {
+          Pair<Boolean, String> value = entry.getValue();
+          LOG.info("validated:{}, {}, {}",
+              value.getLeft(), entry.getKey(), value.getRight());
         }
-      } else {
-        LOG.info("SCM is out of safe mode.");
       }
-      return null;
+    } else {
+      LOG.info("SCM is out of safe mode.");
     }
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCommands.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCommands.java
index 017e1ba..6ba7cf2 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCommands.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCommands.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,13 +21,12 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.GenericCli;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
-import org.apache.hadoop.hdds.scm.cli.container.WithScmClient;
+import org.apache.hadoop.hdds.cli.OzoneAdmin;
+import org.apache.hadoop.hdds.cli.SubcommandWithParent;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.kohsuke.MetaInfServices;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Model.CommandSpec;
-import picocli.CommandLine.ParentCommand;
 import picocli.CommandLine.Spec;
 
 /**
@@ -43,24 +42,20 @@ import picocli.CommandLine.Spec;
         SafeModeExitSubcommand.class,
         SafeModeWaitSubcommand.class
     })
-public class SafeModeCommands implements Callable<Void> {
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(SafeModeCommands.class);
+@MetaInfServices(SubcommandWithParent.class)
+public class SafeModeCommands implements Callable<Void>, SubcommandWithParent {
 
   @Spec
   private CommandSpec spec;
 
-  @ParentCommand
-  private WithScmClient parent;
-
-  public WithScmClient getParent() {
-    return parent;
-  }
-
   @Override
   public Void call() throws Exception {
     GenericCli.missingSubcommand(spec);
     return null;
   }
+
+  @Override
+  public Class<?> getParentType() {
+    return OzoneAdmin.class;
+  }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
index 9f1db45..12490c5 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdds.scm.cli;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
@@ -25,7 +25,6 @@ import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
-import picocli.CommandLine.ParentCommand;
 
 /**
  * This is the handler that process safe mode exit command.
@@ -35,23 +34,16 @@ import picocli.CommandLine.ParentCommand;
     description = "Force SCM out of safe mode",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class SafeModeExitSubcommand implements Callable<Void> {
+public class SafeModeExitSubcommand extends ScmSubcommand {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(SafeModeExitSubcommand.class);
 
-  @ParentCommand
-  private SafeModeCommands parent;
-
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-
-      boolean execReturn = scmClient.forceExitSafeMode();
-      if(execReturn){
-        LOG.info("SCM exit safe mode successfully.");
-      }
-      return null;
+  public void execute(ScmClient scmClient) throws IOException {
+    boolean execReturn = scmClient.forceExitSafeMode();
+    if(execReturn){
+      LOG.info("SCM exit safe mode successfully.");
     }
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
index 7668a47..e3fb5c1 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -27,7 +27,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Option;
-import picocli.CommandLine.ParentCommand;
+import picocli.CommandLine.Mixin;
 
 /**
  * This is the handler that process safe mode wait command.
@@ -45,21 +45,20 @@ public class SafeModeWaitSubcommand implements Callable<Void> {
   @Option(description =
       "Define timeout (in second) to wait until (exit code 1) "
           + "or until safemode is ended (exit code 0).", defaultValue = "30",
-      required = false, names = {
-      "-t", "--timeout"})
+      names = { "-t", "--timeout"})
   private long timeoutSeconds;
 
   private long startTestTime;
 
-  @ParentCommand
-  private SafeModeCommands parent;
+  @Mixin
+  private ScmOption scmOption;
 
   @Override
   public Void call() throws Exception {
     startTestTime = System.currentTimeMillis();
 
     while (getRemainingTimeInSec() > 0) {
-      try (ScmClient scmClient = parent.getParent().createScmClient()) {
+      try (ScmClient scmClient = scmOption.createScmClient()) {
         while (getRemainingTimeInSec() > 0) {
 
           boolean isSafeModeActive = scmClient.inSafeMode();
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ScmOption.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ScmOption.java
new file mode 100644
index 0000000..5b8b814
--- /dev/null
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ScmOption.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.hdds.cli.GenericParentCommand;
+import org.apache.hadoop.hdds.conf.MutableConfigurationSource;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import picocli.CommandLine;
+
+import java.io.IOException;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY;
+import static picocli.CommandLine.Spec.Target.MIXEE;
+
+/**
+ * Defines command-line option for SCM address.
+ */
+public class ScmOption {
+
+  @CommandLine.Spec(MIXEE)
+  private CommandLine.Model.CommandSpec spec;
+
+  @CommandLine.Option(names = {"--scm"},
+      description = "The destination scm (host:port)")
+  private String scm;
+
+  public ScmClient createScmClient() {
+    try {
+      GenericParentCommand parent = (GenericParentCommand)
+          spec.root().userObject();
+      OzoneConfiguration conf = parent.createOzoneConfiguration();
+      checkAndSetSCMAddressArg(conf);
+
+      return new ContainerOperationClient(conf);
+    } catch (IOException ex) {
+      throw new IllegalArgumentException("Can't create SCM client", ex);
+    }
+  }
+
+  private void checkAndSetSCMAddressArg(MutableConfigurationSource conf) {
+    if (StringUtils.isNotEmpty(scm)) {
+      conf.set(OZONE_SCM_CLIENT_ADDRESS_KEY, scm);
+    }
+    if (!HddsUtils.getHostNameFromConfigKeys(conf,
+        ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY).isPresent()) {
+
+      throw new IllegalArgumentException(
+          ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY
+              + " should be set in ozone-site.xml or with the --scm option");
+    }
+  }
+
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/WithScmClient.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ScmSubcommand.java
similarity index 60%
rename from hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/WithScmClient.java
rename to hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ScmSubcommand.java
index 9852d50..6dc09c2 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/WithScmClient.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ScmSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,15 +15,29 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.scm.cli.container;
+package org.apache.hadoop.hdds.scm.cli;
 
 import org.apache.hadoop.hdds.scm.client.ScmClient;
+import picocli.CommandLine;
+
+import java.io.IOException;
+import java.util.concurrent.Callable;
 
 /**
- * Command which provides a SCM client based on the current config.
+ * Base class for admin commands that connect via SCM client.
  */
-public interface WithScmClient {
+public abstract class ScmSubcommand implements Callable<Void> {
+
+  @CommandLine.Mixin
+  private ScmOption scmOption;
 
-  ScmClient createScmClient();
+  protected abstract void execute(ScmClient client) throws IOException;
 
+  @Override
+  public final Void call() throws Exception {
+    try (ScmClient scmClient = scmOption.createScmClient()) {
+      execute(scmClient);
+      return null;
+    }
+  }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
index 214da34..c1aebae 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,18 +18,19 @@
 
 package org.apache.hadoop.hdds.scm.cli;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.TreeSet;
-import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.cli.OzoneAdmin;
+import org.apache.hadoop.hdds.cli.SubcommandWithParent;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.hdds.scm.cli.container.WithScmClient;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 
 import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DEAD;
@@ -37,9 +38,9 @@ import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DECOMMI
 import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.DECOMMISSIONING;
 import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.HEALTHY;
 import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState.STALE;
+
+import org.kohsuke.MetaInfServices;
 import picocli.CommandLine;
-import picocli.CommandLine.Model.CommandSpec;
-import picocli.CommandLine.Spec;
 
 /**
  * Handler of printTopology command.
@@ -49,22 +50,18 @@ import picocli.CommandLine.Spec;
     description = "Print a tree of the network topology as reported by SCM",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class TopologySubcommand implements Callable<Void> {
-
-  @Spec
-  private CommandSpec spec;
-
-  @CommandLine.ParentCommand
-  private WithScmClient parent;
+@MetaInfServices(SubcommandWithParent.class)
+public class TopologySubcommand extends ScmSubcommand
+    implements SubcommandWithParent {
 
-  private static List<HddsProtos.NodeState> stateArray = new ArrayList<>();
+  private static final List<HddsProtos.NodeState> STATES = new ArrayList<>();
 
   static {
-    stateArray.add(HEALTHY);
-    stateArray.add(STALE);
-    stateArray.add(DEAD);
-    stateArray.add(DECOMMISSIONING);
-    stateArray.add(DECOMMISSIONED);
+    STATES.add(HEALTHY);
+    STATES.add(STALE);
+    STATES.add(DEAD);
+    STATES.add(DECOMMISSIONING);
+    STATES.add(DECOMMISSIONED);
   }
 
   @CommandLine.Option(names = {"-o", "--order"},
@@ -76,22 +73,24 @@ public class TopologySubcommand implements Callable<Void> {
   private boolean fullInfo;
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.createScmClient()) {
-      for (HddsProtos.NodeState state : stateArray) {
-        List<HddsProtos.Node> nodes = scmClient.queryNode(state,
-            HddsProtos.QueryScope.CLUSTER, "");
-        if (nodes != null && nodes.size() > 0) {
-          // show node state
-          System.out.println("State = " + state.toString());
-          if (order) {
-            printOrderedByLocation(nodes);
-          } else {
-            printNodesWithLocation(nodes);
-          }
+  public Class<?> getParentType() {
+    return OzoneAdmin.class;
+  }
+
+  @Override
+  protected void execute(ScmClient scmClient) throws IOException {
+    for (HddsProtos.NodeState state : STATES) {
+      List<HddsProtos.Node> nodes = scmClient.queryNode(state,
+          HddsProtos.QueryScope.CLUSTER, "");
+      if (nodes != null && !nodes.isEmpty()) {
+        // show node state
+        System.out.println("State = " + state.toString());
+        if (order) {
+          printOrderedByLocation(nodes);
+        } else {
+          printNodesWithLocation(nodes);
         }
       }
-      return null;
     }
   }
 
@@ -124,7 +123,7 @@ public class TopologySubcommand implements Callable<Void> {
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < ports.size(); i++) {
       HddsProtos.Port port = ports.get(i);
-      sb.append(port.getName() + "=" + port.getValue());
+      sb.append(port.getName()).append("=").append(port.getValue());
       if (i < ports.size() - 1) {
         sb.append(",");
       }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseSubcommand.java
index cd81d32..53cbd2f 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CloseSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.hdds.scm.cli.container;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 
 import static org.apache.hadoop.hdds.scm.cli.container.ContainerCommands.checkContainerExists;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Parameters;
-import picocli.CommandLine.ParentCommand;
 
 /**
  * The handler of close container command.
@@ -35,21 +35,15 @@ import picocli.CommandLine.ParentCommand;
     description = "close container",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class CloseSubcommand implements Callable<Void> {
-
-  @ParentCommand
-  private ContainerCommands parent;
+public class CloseSubcommand extends ScmSubcommand {
 
   @Parameters(description = "Id of the container to close")
   private long containerId;
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      checkContainerExists(scmClient, containerId);
-      scmClient.closeContainer(containerId);
-      return null;
-    }
+  public void execute(ScmClient scmClient) throws IOException {
+    checkContainerExists(scmClient, containerId);
+    scmClient.closeContainer(containerId);
   }
 
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommands.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommands.java
index cf665b0..de1015d 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommands.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ContainerCommands.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,12 +22,14 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.GenericCli;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.cli.OzoneAdmin;
+import org.apache.hadoop.hdds.cli.SubcommandWithParent;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.container.ContainerInfo;
 
+import org.kohsuke.MetaInfServices;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Model.CommandSpec;
-import picocli.CommandLine.ParentCommand;
 import picocli.CommandLine.Spec;
 
 /**
@@ -45,24 +47,23 @@ import picocli.CommandLine.Spec;
         CreateSubcommand.class,
         CloseSubcommand.class
     })
-public class ContainerCommands implements Callable<Void> {
+@MetaInfServices(SubcommandWithParent.class)
+public class ContainerCommands implements Callable<Void>, SubcommandWithParent {
 
   @Spec
   private CommandSpec spec;
 
-  @ParentCommand
-  private WithScmClient parent;
-
-  public WithScmClient getParent() {
-    return parent;
-  }
-
   @Override
   public Void call() throws Exception {
     GenericCli.missingSubcommand(spec);
     return null;
   }
 
+  @Override
+  public Class<?> getParentType() {
+    return OzoneAdmin.class;
+  }
+
   public static void checkContainerExists(ScmClient scmClient, long containerId)
       throws IOException {
     ContainerInfo container = scmClient.getContainer(containerId);
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java
index eb79e50..9eedbf8 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.hdds.scm.cli.container;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .ContainerWithPipeline;
@@ -28,7 +29,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Option;
-import picocli.CommandLine.ParentCommand;
 
 /**
  * This is the handler that process container creation command.
@@ -38,27 +38,19 @@ import picocli.CommandLine.ParentCommand;
     description = "Create container",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class CreateSubcommand implements Callable<Void> {
+public class CreateSubcommand extends ScmSubcommand {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(CreateSubcommand.class);
 
-  @ParentCommand
-  private ContainerCommands parent;
-
   @Option(description = "Owner of the new container", defaultValue = "OZONE",
-      required = false, names = {
-      "-o", "--owner"})
-
+      names = { "-o", "--owner"})
   private String owner;
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      ContainerWithPipeline container = scmClient.createContainer(owner);
-      LOG.info("Container {} is created.",
-          container.getContainerInfo().getContainerID());
-      return null;
-    }
+  public void execute(ScmClient scmClient) throws IOException {
+    ContainerWithPipeline container = scmClient.createContainer(owner);
+    LOG.info("Container {} is created.",
+        container.getContainerInfo().getContainerID());
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteSubcommand.java
index a438fe9..62d1b8a 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/DeleteSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,16 +18,16 @@
 
 package org.apache.hadoop.hdds.scm.cli.container;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 
 import static org.apache.hadoop.hdds.scm.cli.container.ContainerCommands.checkContainerExists;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Option;
 import picocli.CommandLine.Parameters;
-import picocli.CommandLine.ParentCommand;
 
 /**
  * This is the handler that process delete container command.
@@ -37,7 +37,7 @@ import picocli.CommandLine.ParentCommand;
     description = "Delete container",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class DeleteSubcommand implements Callable<Void> {
+public class DeleteSubcommand extends ScmSubcommand {
 
   @Parameters(description = "Id of the container to close")
   private long containerId;
@@ -46,15 +46,9 @@ public class DeleteSubcommand implements Callable<Void> {
       "--force"}, description = "forcibly delete the container")
   private boolean force;
 
-  @ParentCommand
-  private ContainerCommands parent;
-
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      checkContainerExists(scmClient, containerId);
-      scmClient.deleteContainer(containerId, force);
-      return null;
-    }
+  public void execute(ScmClient scmClient) throws IOException {
+    checkContainerExists(scmClient, containerId);
+    scmClient.deleteContainer(containerId, force);
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
index 31e2a45..5defc24 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,11 +17,12 @@
  */
 package org.apache.hadoop.hdds.scm.cli.container;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 import java.util.stream.Collectors;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.container.common.helpers
     .ContainerWithPipeline;
@@ -31,7 +32,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Parameters;
-import picocli.CommandLine.ParentCommand;
 
 /**
  * This is the handler that process container info command.
@@ -41,36 +41,30 @@ import picocli.CommandLine.ParentCommand;
     description = "Show information about a specific container",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class InfoSubcommand implements Callable<Void> {
+public class InfoSubcommand extends ScmSubcommand {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(InfoSubcommand.class);
 
-  @ParentCommand
-  private ContainerCommands parent;
-
   @Parameters(description = "Decimal id of the container.")
   private long containerID;
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      final ContainerWithPipeline container = scmClient.
-          getContainerWithPipeline(containerID);
-      Preconditions.checkNotNull(container, "Container cannot be null");
+  public void execute(ScmClient scmClient) throws IOException {
+    final ContainerWithPipeline container = scmClient.
+        getContainerWithPipeline(containerID);
+    Preconditions.checkNotNull(container, "Container cannot be null");
 
-      // Print container report info.
-      LOG.info("Container id: {}", containerID);
-      LOG.info("Pipeline id: {}", container.getPipeline().getId().getId());
-      LOG.info("Container State: {}", container.getContainerInfo().getState());
+    // Print container report info.
+    LOG.info("Container id: {}", containerID);
+    LOG.info("Pipeline id: {}", container.getPipeline().getId().getId());
+    LOG.info("Container State: {}", container.getContainerInfo().getState());
 
-      // Print pipeline of an existing container.
-      String machinesStr = container.getPipeline().getNodes().stream().map(
-          InfoSubcommand::buildDatanodeDetails)
-          .collect(Collectors.joining(",\n"));
-      LOG.info("Datanodes: [{}]", machinesStr);
-      return null;
-    }
+    // Print pipeline of an existing container.
+    String machinesStr = container.getPipeline().getNodes().stream().map(
+        InfoSubcommand::buildDatanodeDetails)
+        .collect(Collectors.joining(",\n"));
+    LOG.info("Datanodes: [{}]", machinesStr);
   }
 
   private static String buildDatanodeDetails(DatanodeDetails details) {
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
index 3ffc118..e9b0b7d 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,9 +19,9 @@ package org.apache.hadoop.hdds.scm.cli.container;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.container.ContainerInfo;
 
@@ -36,7 +36,6 @@ import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Help.Visibility;
 import picocli.CommandLine.Option;
-import picocli.CommandLine.ParentCommand;
 
 /**
  * This is the handler that process container list command.
@@ -46,22 +45,19 @@ import picocli.CommandLine.ParentCommand;
     description = "List containers",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class ListSubcommand implements Callable<Void> {
+public class ListSubcommand extends ScmSubcommand {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(ListSubcommand.class);
 
-  @ParentCommand
-  private ContainerCommands parent;
-
   @Option(names = {"-s", "--start"},
-      description = "Container id to start the iteration", required = false)
-  private long startId = 0;
+      description = "Container id to start the iteration")
+  private long startId;
 
   @Option(names = {"-c", "--count"},
       description = "Maximum number of containers to list",
       defaultValue = "20", showDefaultValue = Visibility.ALWAYS)
-  private int count = 20;
+  private int count;
 
   private static final ObjectWriter WRITER;
 
@@ -83,17 +79,13 @@ public class ListSubcommand implements Callable<Void> {
   }
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-
-      List<ContainerInfo> containerList =
-          scmClient.listContainer(startId, count);
+  public void execute(ScmClient scmClient) throws IOException {
+    List<ContainerInfo> containerList =
+        scmClient.listContainer(startId, count);
 
-      // Output data list
-      for (ContainerInfo container : containerList) {
-        outputContainerInfo(container);
-      }
-      return null;
+    // Output data list
+    for (ContainerInfo container : containerList) {
+      outputContainerInfo(container);
     }
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DatanodeCommands.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DatanodeCommands.java
index b7ba59c..7e77c60 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DatanodeCommands.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/DatanodeCommands.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,8 +21,10 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.GenericCli;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
-import org.apache.hadoop.hdds.scm.cli.container.WithScmClient;
+import org.apache.hadoop.hdds.cli.OzoneAdmin;
+import org.apache.hadoop.hdds.cli.SubcommandWithParent;
 
+import org.kohsuke.MetaInfServices;
 import picocli.CommandLine;
 import picocli.CommandLine.Model.CommandSpec;
 import picocli.CommandLine.Spec;
@@ -38,21 +40,20 @@ import picocli.CommandLine.Spec;
     subcommands = {
         ListInfoSubcommand.class
     })
-public class DatanodeCommands implements Callable<Void> {
+@MetaInfServices(SubcommandWithParent.class)
+public class DatanodeCommands implements Callable<Void>, SubcommandWithParent {
 
   @Spec
   private CommandSpec spec;
 
-  @CommandLine.ParentCommand
-  private WithScmClient parent;
-
-  public WithScmClient getParent() {
-    return parent;
-  }
-
   @Override
   public Void call() throws Exception {
     GenericCli.missingSubcommand(spec);
     return null;
   }
+
+  @Override
+  public Class<?> getParentType() {
+    return OzoneAdmin.class;
+  }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
index e4060b3..80c5eca 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,13 +21,13 @@ import com.google.common.base.Strings;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import picocli.CommandLine;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.concurrent.Callable;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
@@ -39,44 +39,36 @@ import java.util.stream.Stream;
     description = "List info of datanodes",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class ListInfoSubcommand implements Callable<Void> {
-
-  @CommandLine.ParentCommand
-  private DatanodeCommands parent;
+public class ListInfoSubcommand extends ScmSubcommand {
 
   @CommandLine.Option(names = {"--ip"},
       description = "Show info by ip address.",
-      defaultValue = "",
-      required = false)
+      defaultValue = "")
   private String ipaddress;
 
   @CommandLine.Option(names = {"--id"},
       description = "Show info by datanode UUID.",
-      defaultValue = "",
-      required = false)
+      defaultValue = "")
   private String uuid;
 
   private List<Pipeline> pipelines;
 
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      pipelines = scmClient.listPipelines();
-      if (Strings.isNullOrEmpty(ipaddress) && Strings.isNullOrEmpty(uuid)) {
-        getAllNodes(scmClient).stream().forEach(p -> printDatanodeInfo(p));
-      } else {
-        Stream<DatanodeDetails> allNodes = getAllNodes(scmClient).stream();
-        if (!Strings.isNullOrEmpty(ipaddress)) {
-          allNodes = allNodes.filter(p -> p.getIpAddress()
-              .compareToIgnoreCase(ipaddress) == 0);
-        }
-        if (!Strings.isNullOrEmpty(uuid)) {
-          allNodes = allNodes.filter(p -> p.getUuid().toString().equals(uuid));
-        }
-        allNodes.forEach(p -> printDatanodeInfo(p));
+  public void execute(ScmClient scmClient) throws IOException {
+    pipelines = scmClient.listPipelines();
+    if (Strings.isNullOrEmpty(ipaddress) && Strings.isNullOrEmpty(uuid)) {
+      getAllNodes(scmClient).forEach(this::printDatanodeInfo);
+    } else {
+      Stream<DatanodeDetails> allNodes = getAllNodes(scmClient).stream();
+      if (!Strings.isNullOrEmpty(ipaddress)) {
+        allNodes = allNodes.filter(p -> p.getIpAddress()
+            .compareToIgnoreCase(ipaddress) == 0);
+      }
+      if (!Strings.isNullOrEmpty(uuid)) {
+        allNodes = allNodes.filter(p -> p.getUuid().toString().equals(uuid));
       }
-      return null;
+      allNodes.forEach(this::printDatanodeInfo);
     }
   }
 
@@ -101,7 +93,7 @@ public class ListInfoSubcommand implements Callable<Void> {
             " or the node is not in Healthy state.");
       } else {
         relatedPipelineNum = relatedPipelines.size();
-        relatedPipelines.stream().forEach(
+        relatedPipelines.forEach(
             p -> pipelineListInfo.append(p.getId().getId().toString())
                 .append("/").append(p.getFactor().toString()).append("/")
                 .append(p.getType().toString()).append("/")
@@ -118,4 +110,4 @@ public class ListInfoSubcommand implements Callable<Void> {
         + "/" + datanode.getHostName() + "/" + relatedPipelineNum +
         " pipelines) \n" + "Related pipelines: \n" + pipelineListInfo);
   }
-}
\ No newline at end of file
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ActivatePipelineSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ActivatePipelineSubcommand.java
index ec4b1b7..a61655d 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ActivatePipelineSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ActivatePipelineSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,10 +20,11 @@ package org.apache.hadoop.hdds.scm.cli.pipeline;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import picocli.CommandLine;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 /**
  * Handler of activate pipeline command.
@@ -33,20 +34,14 @@ import java.util.concurrent.Callable;
     description = "Activates the given Pipeline",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class ActivatePipelineSubcommand implements Callable<Void> {
-
-  @CommandLine.ParentCommand
-  private PipelineCommands parent;
+public class ActivatePipelineSubcommand extends ScmSubcommand {
 
   @CommandLine.Parameters(description = "ID of the pipeline to activate")
   private String pipelineId;
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      scmClient.activatePipeline(
-          HddsProtos.PipelineID.newBuilder().setId(pipelineId).build());
-      return null;
-    }
+  public void execute(ScmClient scmClient) throws IOException {
+    scmClient.activatePipeline(
+        HddsProtos.PipelineID.newBuilder().setId(pipelineId).build());
   }
 }
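
Leaf commands follow the second pattern visible above: extend ScmSubcommand and implement only execute(ScmClient), leaving client creation and cleanup (including the --scm option) to the base class. A minimal hypothetical subcommand along these lines, for illustration only:

    import java.io.IOException;

    import org.apache.hadoop.hdds.cli.HddsVersionProvider;
    import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
    import org.apache.hadoop.hdds.scm.client.ScmClient;
    import picocli.CommandLine;

    /**
     * Hypothetical example: prints every pipeline matching the given ID.
     * Illustrative only; the real subcommands of this commit are shown above.
     */
    @CommandLine.Command(name = "example-show",
        description = "Illustrative ScmSubcommand",
        mixinStandardHelpOptions = true,
        versionProvider = HddsVersionProvider.class)
    public class ExampleShowSubcommand extends ScmSubcommand {

      @CommandLine.Parameters(description = "ID of the pipeline to show")
      private String pipelineId;

      @Override
      public void execute(ScmClient scmClient) throws IOException {
        // scmClient is supplied (and closed) by ScmSubcommand.
        scmClient.listPipelines().stream()
            .filter(p -> p.getId().getId().toString().equals(pipelineId))
            .forEach(System.out::println);
      }
    }
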
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ClosePipelineSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ClosePipelineSubcommand.java
index 89a280e..78b83e5 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ClosePipelineSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ClosePipelineSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,10 +20,11 @@ package org.apache.hadoop.hdds.scm.cli.pipeline;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import picocli.CommandLine;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 /**
  * Handler of close pipeline command.
@@ -33,20 +34,14 @@ import java.util.concurrent.Callable;
     description = "Close pipeline",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class ClosePipelineSubcommand implements Callable<Void> {
-
-  @CommandLine.ParentCommand
-  private PipelineCommands parent;
+public class ClosePipelineSubcommand extends ScmSubcommand {
 
   @CommandLine.Parameters(description = "ID of the pipeline to close")
   private String pipelineId;
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      scmClient.closePipeline(
-          HddsProtos.PipelineID.newBuilder().setId(pipelineId).build());
-      return null;
-    }
+  public void execute(ScmClient scmClient) throws IOException {
+    scmClient.closePipeline(
+        HddsProtos.PipelineID.newBuilder().setId(pipelineId).build());
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java
index e0bdddb..c784be8 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,11 +20,12 @@ package org.apache.hadoop.hdds.scm.cli.pipeline;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import picocli.CommandLine;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 /**
  * Handler of createPipeline command.
@@ -34,44 +35,37 @@ import java.util.concurrent.Callable;
     description = "create pipeline",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class CreatePipelineSubcommand implements Callable<Void> {
-  @CommandLine.ParentCommand
-  private PipelineCommands parent;
+public class CreatePipelineSubcommand extends ScmSubcommand {
 
   @CommandLine.Option(
       names = {"-t", "--replicationType"},
       description = "Replication type (STAND_ALONE, RATIS)",
       defaultValue = "STAND_ALONE"
   )
-  private HddsProtos.ReplicationType type
-      = HddsProtos.ReplicationType.STAND_ALONE;
+  private HddsProtos.ReplicationType type;
 
   @CommandLine.Option(
       names = {"-f", "--replicationFactor"},
       description = "Replication factor (ONE, THREE)",
       defaultValue = "ONE"
   )
-  private HddsProtos.ReplicationFactor factor
-      = HddsProtos.ReplicationFactor.ONE;
+  private HddsProtos.ReplicationFactor factor;
 
   @Override
-  public Void call() throws Exception {
+  public void execute(ScmClient scmClient) throws IOException {
     if (type == HddsProtos.ReplicationType.CHAINED) {
       throw new IllegalArgumentException(type.name()
           + " is not supported yet.");
     }
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      Pipeline pipeline = scmClient.createReplicationPipeline(
-          type,
-          factor,
-          HddsProtos.NodePool.getDefaultInstance());
+    Pipeline pipeline = scmClient.createReplicationPipeline(
+        type,
+        factor,
+        HddsProtos.NodePool.getDefaultInstance());
 
-      if (pipeline != null) {
-        System.out.println(pipeline.getId().toString() +
-            " is created. Factor: " + pipeline.getFactor() +
-            ", Type: " + pipeline.getType());
-      }
-      return null;
+    if (pipeline != null) {
+      System.out.println(pipeline.getId().toString() +
+          " is created. Factor: " + pipeline.getFactor() +
+          ", Type: " + pipeline.getType());
     }
   }
-}
\ No newline at end of file
+}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/DeactivatePipelineSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/DeactivatePipelineSubcommand.java
index 4f4f741..70df4d9 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/DeactivatePipelineSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/DeactivatePipelineSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,10 +20,11 @@ package org.apache.hadoop.hdds.scm.cli.pipeline;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import picocli.CommandLine;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 
 /**
  * Handler of deactivate pipeline command.
@@ -33,20 +34,14 @@ import java.util.concurrent.Callable;
     description = "Deactivates the given Pipeline",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class DeactivatePipelineSubcommand implements Callable<Void> {
-
-  @CommandLine.ParentCommand
-  private PipelineCommands parent;
+public class DeactivatePipelineSubcommand extends ScmSubcommand {
 
   @CommandLine.Parameters(description = "ID of the pipeline to deactivate")
   private String pipelineId;
 
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      scmClient.deactivatePipeline(
-          HddsProtos.PipelineID.newBuilder().setId(pipelineId).build());
-      return null;
-    }
+  public void execute(ScmClient scmClient) throws IOException {
+    scmClient.deactivatePipeline(
+        HddsProtos.PipelineID.newBuilder().setId(pipelineId).build());
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ListPipelinesSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ListPipelinesSubcommand.java
index 729daea..58ae26e 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ListPipelinesSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/ListPipelinesSubcommand.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,11 +20,12 @@ package org.apache.hadoop.hdds.scm.cli.pipeline;
 
 import com.google.common.base.Strings;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import picocli.CommandLine;
 
-import java.util.concurrent.Callable;
+import java.io.IOException;
 import java.util.stream.Stream;
 
 /**
@@ -35,38 +36,29 @@ import java.util.stream.Stream;
     description = "List all active pipelines",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public class ListPipelinesSubcommand implements Callable<Void> {
-
-  @CommandLine.ParentCommand
-  private PipelineCommands parent;
+public class ListPipelinesSubcommand extends ScmSubcommand {
 
   @CommandLine.Option(names = {"-ffc", "--filterByFactor"},
       description = "Filter listed pipelines by Factor(ONE/one)",
-      defaultValue = "",
-      required = false)
+      defaultValue = "")
   private String factor;
 
   @CommandLine.Option(names = {"-fst", "--filterByState"},
       description = "Filter listed pipelines by State(OPEN/CLOSE)",
-      defaultValue = "",
-      required = false)
+      defaultValue = "")
   private String state;
 
-
   @Override
-  public Void call() throws Exception {
-    try (ScmClient scmClient = parent.getParent().createScmClient()) {
-      Stream<Pipeline> stream = scmClient.listPipelines().stream();
-      if (!Strings.isNullOrEmpty(factor)) {
-        stream = stream.filter(
-            p -> p.getFactor().toString().compareToIgnoreCase(factor) == 0);
-      }
-      if (!Strings.isNullOrEmpty(state)) {
-        stream = stream.filter(p -> p.getPipelineState().toString()
-            .compareToIgnoreCase(state) == 0);
-      }
-      stream.forEach(System.out::println);
-      return null;
+  public void execute(ScmClient scmClient) throws IOException {
+    Stream<Pipeline> stream = scmClient.listPipelines().stream();
+    if (!Strings.isNullOrEmpty(factor)) {
+      stream = stream.filter(
+          p -> p.getFactor().toString().compareToIgnoreCase(factor) == 0);
+    }
+    if (!Strings.isNullOrEmpty(state)) {
+      stream = stream.filter(p -> p.getPipelineState().toString()
+          .compareToIgnoreCase(state) == 0);
     }
+    stream.forEach(System.out::println);
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/PipelineCommands.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/PipelineCommands.java
index d5c0234..ba7371e 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/PipelineCommands.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/PipelineCommands.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,11 +21,12 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.GenericCli;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
-import org.apache.hadoop.hdds.scm.cli.container.WithScmClient;
+import org.apache.hadoop.hdds.cli.OzoneAdmin;
+import org.apache.hadoop.hdds.cli.SubcommandWithParent;
 
+import org.kohsuke.MetaInfServices;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Model.CommandSpec;
-import picocli.CommandLine.ParentCommand;
 import picocli.CommandLine.Spec;
 
 /**
@@ -43,21 +44,20 @@ import picocli.CommandLine.Spec;
         CreatePipelineSubcommand.class,
         ClosePipelineSubcommand.class
     })
-public class PipelineCommands implements Callable<Void> {
+@MetaInfServices(SubcommandWithParent.class)
+public class PipelineCommands implements Callable<Void>, SubcommandWithParent {
 
   @Spec
   private CommandSpec spec;
 
-  @ParentCommand
-  private WithScmClient parent;
-
-  public WithScmClient getParent() {
-    return parent;
-  }
-
   @Override
   public Void call() throws Exception {
     GenericCli.missingSubcommand(spec);
     return null;
   }
+
+  @Override
+  public Class<?> getParentType() {
+    return OzoneAdmin.class;
+  }
 }
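
The SubcommandWithParent contract itself lives in hadoop-hdds and is not shown in this hunk; as implied by the implementations above, its shape is essentially a single method (sketch for reference only):

    package org.apache.hadoop.hdds.cli;

    /**
     * Contract for subcommands that register themselves under a parent
     * command via META-INF/services instead of a hard-coded subcommand list.
     * (Sketch inferred from the usages above; the real interface is defined
     * in hadoop-hdds and is not shown in this diff.)
     */
    public interface SubcommandWithParent {

      /** Java type of the command under which this subcommand is attached. */
      Class<?> getParentType();
    }
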
diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/admin.robot
similarity index 58%
copy from hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot
copy to hadoop-ozone/dist/src/main/smoketest/admincli/admin.robot
index cb16bc8..a28888b 100644
--- a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/admincli/admin.robot
@@ -14,17 +14,19 @@
 # limitations under the License.
 
 *** Settings ***
-Documentation       Smoketest ozone cluster startup
-Library             OperatingSystem
+Documentation       Test ozone admin command
 Library             BuiltIn
 Resource            ../commonlib.robot
 Test Timeout        5 minutes
 
-*** Variables ***
-
-
 *** Test Cases ***
-Run list datanodes
-    ${output} =         Execute          ozone admin datanode list
-                        Should contain   ${output}   Datanode:
-                        Should contain   ${output}   Related pipelines:
\ No newline at end of file
+Incomplete command
+    ${output} =         Execute And Ignore Error     ozone admin
+                        Should contain   ${output}   Incomplete command
+                        Should contain   ${output}   container
+                        Should contain   ${output}   datanode
+                        Should contain   ${output}   om
+                        Should contain   ${output}   pipeline
+                        Should contain   ${output}   replicationmanager
+                        Should contain   ${output}   safemode
+                        Should contain   ${output}   printTopology
diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/container.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/container.robot
new file mode 100644
index 0000000..0560880
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/admincli/container.robot
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       Test ozone admin container command
+Library             BuiltIn
+Resource            ../commonlib.robot
+Test Timeout        5 minutes
+Suite Setup         Create test data
+
+*** Variables ***
+${CONTAINER}
+
+*** Keywords ***
+Create test data
+    Run Keyword if      '${SECURITY_ENABLED}' == 'true'     Kinit test user     testuser     testuser.keytab
+                        Execute          ozone freon ockg -n1 -t1 -p container
+
+*** Test Cases ***
+Create container
+    ${output} =         Execute          ozone admin container create
+                        Should contain   ${output}   is created
+    ${container} =      Execute          echo "${output}" | grep 'is created' | cut -f2 -d' '
+                        Set Suite Variable    ${CONTAINER}    ${container}
+
+List containers
+    ${output} =         Execute          ozone admin container list
+                        Should contain   ${output}   OPEN
+
+List containers with explicit host
+    ${output} =         Execute          ozone admin container list --scm scm
+                        Should contain   ${output}   OPEN
+
+Container info
+    ${output} =         Execute          ozone admin container info "${CONTAINER}"
+                        Should contain   ${output}   Container id: ${CONTAINER}
+                        Should contain   ${output}   Datanodes
+
+Close container
+                        Execute          ozone admin container close "${CONTAINER}"
+    ${output} =         Execute          ozone admin container info "${CONTAINER}"
+                        Should contain   ${output}   CLOS
+
+Incomplete command
+    ${output} =         Execute And Ignore Error     ozone admin container
+                        Should contain   ${output}   Incomplete command
+                        Should contain   ${output}   list
+                        Should contain   ${output}   info
+                        Should contain   ${output}   delete
+                        Should contain   ${output}   create
+                        Should contain   ${output}   close
+
+List containers on unknown host
+    ${output} =         Execute And Ignore Error     ozone admin --verbose container list --scm unknown-host
+                        Should contain   ${output}   Invalid host name
+
diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot
index cb16bc8..b34f3af 100644
--- a/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/admincli/datanode.robot
@@ -14,17 +14,22 @@
 # limitations under the License.
 
 *** Settings ***
-Documentation       Smoketest ozone cluster startup
-Library             OperatingSystem
+Documentation       Test ozone admin datanode command
 Library             BuiltIn
 Resource            ../commonlib.robot
 Test Timeout        5 minutes
 
-*** Variables ***
-
-
 *** Test Cases ***
-Run list datanodes
+List datanodes
     ${output} =         Execute          ozone admin datanode list
                         Should contain   ${output}   Datanode:
-                        Should contain   ${output}   Related pipelines:
\ No newline at end of file
+                        Should contain   ${output}   Related pipelines:
+
+Incomplete command
+    ${output} =         Execute And Ignore Error     ozone admin datanode
+                        Should contain   ${output}   Incomplete command
+                        Should contain   ${output}   list
+
+List datanodes on unknown host
+    ${output} =         Execute And Ignore Error     ozone admin --verbose datanode list --scm unknown-host
+                        Should contain   ${output}   Invalid host name
diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/pipeline.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/pipeline.robot
index b514ae7..3a97f83 100644
--- a/hadoop-ozone/dist/src/main/smoketest/admincli/pipeline.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/admincli/pipeline.robot
@@ -14,21 +14,52 @@
 # limitations under the License.
 
 *** Settings ***
-Documentation       Smoketest ozone cluster startup
-Library             OperatingSystem
+Documentation       Test ozone admin pipeline command
 Library             BuiltIn
 Resource            ../commonlib.robot
 Test Timeout        5 minutes
 
 *** Variables ***
-
+${PIPELINE}
 
 *** Test Cases ***
-Run list pipeline
+Create pipeline
+    ${output} =         Execute          ozone admin pipeline create
+                        Should contain   ${output}   is created. Factor: ONE, Type: STAND_ALONE
+    ${pipeline} =       Execute          echo "${output}" | grep 'is created' | cut -f1 -d' ' | cut -f2 -d'='
+                        Set Suite Variable    ${PIPELINE}    ${pipeline}
+
+List pipelines
     ${output} =         Execute          ozone admin pipeline list
-                        Should contain   ${output}   Type:
-                        Should contain   ${output}   Factor:ONE, State:
+                        Should contain   ${output}   Factor:ONE
 
-Run create pipeline
-    ${output} =         Execute          ozone admin pipeline create
-                        Should contain   ${output}   is created. Factor: ONE, Type: STAND_ALONE
\ No newline at end of file
+List pipelines with explicit host
+    ${output} =         Execute          ozone admin pipeline list --scm scm
+                        Should contain   ${output}   Factor:ONE
+
+Deactivate pipeline
+                        Execute          ozone admin pipeline deactivate "${PIPELINE}"
+    ${output} =         Execute          ozone admin pipeline list | grep "${PIPELINE}"
+                        Should contain   ${output}   DORMANT
+
+Activate pipeline
+                        Execute          ozone admin pipeline activate "${PIPELINE}"
+    ${output} =         Execute          ozone admin pipeline list | grep "${PIPELINE}"
+                        Should contain   ${output}   OPEN
+
+Close pipeline
+                        Execute          ozone admin pipeline close "${PIPELINE}"
+    ${output} =         Execute          ozone admin pipeline list | grep "${PIPELINE}"
+                        Should contain   ${output}   CLOSED
+
+Incomplete command
+    ${output} =         Execute And Ignore Error     ozone admin pipeline
+                        Should contain   ${output}   Incomplete command
+                        Should contain   ${output}   close
+                        Should contain   ${output}   create
+                        Should contain   ${output}   deactivate
+                        Should contain   ${output}   list
+
+List pipelines on unknown host
+    ${output} =         Execute And Ignore Error     ozone admin --verbose pipeline list --scm unknown-host
+                        Should contain   ${output}   Invalid host name
diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/replicationmanager.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/replicationmanager.robot
new file mode 100644
index 0000000..cef294f
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/admincli/replicationmanager.robot
@@ -0,0 +1,53 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       Test ozone admin replicationmanager command
+Library             BuiltIn
+Resource            ../commonlib.robot
+Test Timeout        5 minutes
+
+*** Test Cases ***
+Check replicationmanager
+    ${output} =         Execute          ozone admin replicationmanager status
+                        Should contain   ${output}   ReplicationManager
+                        Should contain   ${output}   Running
+
+Check replicationmanager with explicit host
+    ${output} =         Execute          ozone admin replicationmanager status --scm scm
+                        Should contain   ${output}   ReplicationManager
+                        Should contain   ${output}   Running
+
+Start replicationmanager
+    ${output} =         Execute          ozone admin replicationmanager start
+                        Should contain   ${output}   Starting ReplicationManager
+                        Wait Until Keyword Succeeds    30sec    5sec    Execute          ozone admin replicationmanager status | grep -q 'is Running'
+
+Stop replicationmanager
+    ${output} =         Execute          ozone admin replicationmanager stop
+                        Should contain   ${output}   Stopping ReplicationManager
+                        Wait Until Keyword Succeeds    30sec    5sec    Execute          ozone admin replicationmanager status | grep -q 'is Not Running'
+
+Incomplete command
+    ${output} =         Execute And Ignore Error     ozone admin replicationmanager
+                        Should contain   ${output}   Incomplete command
+                        Should contain   ${output}   start
+                        Should contain   ${output}   stop
+                        Should contain   ${output}   status
+
+Check replicationmanager on unknown host
+    ${output} =         Execute And Ignore Error     ozone admin --verbose replicationmanager status --scm unknown-host
+                        Should contain   ${output}   Invalid host name
+
diff --git a/hadoop-ozone/dist/src/main/smoketest/admincli/safemode.robot b/hadoop-ozone/dist/src/main/smoketest/admincli/safemode.robot
new file mode 100644
index 0000000..114d846
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/admincli/safemode.robot
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       Test ozone admin safemode command
+Library             BuiltIn
+Resource            ../commonlib.robot
+Test Timeout        5 minutes
+
+*** Test Cases ***
+Check safemode
+    ${output} =         Execute          ozone admin safemode status
+                        Should contain   ${output}   SCM is out of safe mode
+
+Check safemode with explicit host
+    ${output} =         Execute          ozone admin safemode status --scm scm
+                        Should contain   ${output}   SCM is out of safe mode
+
+Wait for safemode exit
+    ${output} =         Execute          ozone admin safemode wait -t 2
+                        Should contain   ${output}   SCM is out of safe mode
+
+Incomplete command
+    ${output} =         Execute And Ignore Error     ozone admin safemode
+                        Should contain   ${output}   Incomplete command
+                        Should contain   ${output}   status
+                        Should contain   ${output}   exit
+                        Should contain   ${output}   wait
+
+Check safemode on unknown host
+    ${output} =         Execute And Ignore Error     ozone admin --verbose safemode status --scm unknown-host
+                        Should contain   ${output}   Invalid host name
+
diff --git a/hadoop-ozone/dist/src/shell/ozone/ozone b/hadoop-ozone/dist/src/shell/ozone/ozone
index e957f7f..c536484 100755
--- a/hadoop-ozone/dist/src/shell/ozone/ozone
+++ b/hadoop-ozone/dist/src/shell/ozone/ozone
@@ -214,7 +214,7 @@ function ozonecmd_case
       OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-tools"
     ;;
     admin)
-      HADOOP_CLASSNAME=org.apache.hadoop.ozone.admin.OzoneAdmin
+      HADOOP_CLASSNAME=org.apache.hadoop.hdds.cli.OzoneAdmin
       OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-tools"
     ;;
     debug)
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneDatanodeShell.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneDatanodeShell.java
index c2ed02e..d52dd33 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneDatanodeShell.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneDatanodeShell.java
@@ -130,7 +130,7 @@ public class TestOzoneDatanodeShell {
   @Test
   public void testDatanodeInvalidParamCommand() {
     LOG.info("Running testDatanodeIncompleteCommand");
-    String expectedError = "Unknown option: -invalidParam";
+    String expectedError = "Unknown option: '-invalidParam'";
     //executing 'ozone datanode -invalidParam'
     String[] args = new String[]{"-invalidParam"};
 
diff --git a/hadoop-ozone/tools/pom.xml b/hadoop-ozone/tools/pom.xml
index 661d542..cc97e3b 100644
--- a/hadoop-ozone/tools/pom.xml
+++ b/hadoop-ozone/tools/pom.xml
@@ -118,8 +118,6 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
     <dependency>
       <groupId>org.kohsuke.metainf-services</groupId>
       <artifactId>metainf-services</artifactId>
-      <version>1.1</version>
-      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>com.github.spotbugs</groupId>
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/OzoneAdmin.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/OzoneAdmin.java
deleted file mode 100644
index 7f748aa..0000000
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/OzoneAdmin.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.admin;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hdds.HddsUtils;
-import org.apache.hadoop.hdds.cli.GenericCli;
-import org.apache.hadoop.hdds.cli.HddsVersionProvider;
-import org.apache.hadoop.hdds.conf.MutableConfigurationSource;
-import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.hdds.scm.ScmConfigKeys;
-import org.apache.hadoop.hdds.scm.cli.ContainerOperationClient;
-import org.apache.hadoop.hdds.scm.cli.ReplicationManagerCommands;
-import org.apache.hadoop.hdds.scm.cli.SafeModeCommands;
-import org.apache.hadoop.hdds.scm.cli.TopologySubcommand;
-import org.apache.hadoop.hdds.scm.cli.container.ContainerCommands;
-import org.apache.hadoop.hdds.scm.cli.container.WithScmClient;
-import org.apache.hadoop.hdds.scm.cli.datanode.DatanodeCommands;
-import org.apache.hadoop.hdds.scm.cli.pipeline.PipelineCommands;
-import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.apache.hadoop.util.NativeCodeLoader;
-
-import org.apache.commons.lang3.StringUtils;
-import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-import picocli.CommandLine;
-import picocli.CommandLine.Option;
-
-/**
- * Ozone Admin Command line tool.
- */
-@CommandLine.Command(name = "ozone admin",
-    hidden = true,
-    description = "Developer tools for Ozone Admin operations",
-    versionProvider = HddsVersionProvider.class,
-    subcommands = {
-        SafeModeCommands.class,
-        ContainerCommands.class,
-        PipelineCommands.class,
-        DatanodeCommands.class,
-        TopologySubcommand.class,
-        ReplicationManagerCommands.class
-    },
-    mixinStandardHelpOptions = true)
-public class OzoneAdmin extends GenericCli implements WithScmClient {
-
-  private OzoneConfiguration ozoneConf;
-
-  @Option(names = {"--scm"}, description = "The destination scm (host:port)")
-  private String scm = "";
-
-  public OzoneAdmin() {
-    super(OzoneAdmin.class);
-  }
-
-  public OzoneConfiguration getOzoneConf() {
-    if (ozoneConf == null) {
-      ozoneConf = createOzoneConfiguration();
-    }
-    return ozoneConf;
-  }
-
-  /**
-   * Main for the Ozone Admin shell Command handling.
-   *
-   * @param argv - System Args Strings[]
-   * @throws Exception
-   */
-  public static void main(String[] argv) throws Exception {
-    LogManager.resetConfiguration();
-    Logger.getRootLogger().setLevel(Level.INFO);
-    Logger.getRootLogger()
-        .addAppender(new ConsoleAppender(new PatternLayout("%m%n")));
-    Logger.getLogger(NativeCodeLoader.class).setLevel(Level.ERROR);
-
-    new OzoneAdmin().run(argv);
-  }
-
-  public ScmClient createScmClient() {
-    try {
-      OzoneConfiguration conf = createOzoneConfiguration();
-      checkAndSetSCMAddressArg(conf);
-
-      return new ContainerOperationClient(conf);
-    } catch (IOException ex) {
-      throw new IllegalArgumentException("Can't create SCM client", ex);
-    }
-  }
-
-  private void checkAndSetSCMAddressArg(MutableConfigurationSource conf) {
-    if (StringUtils.isNotEmpty(scm)) {
-      conf.set(OZONE_SCM_CLIENT_ADDRESS_KEY, scm);
-    }
-    if (!HddsUtils.getHostNameFromConfigKeys(conf,
-        ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY).isPresent()) {
-
-      throw new IllegalArgumentException(
-          ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY
-              + " should be set in ozone-site.xml or with the --scm option");
-    }
-  }
-}
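
The replacement OzoneAdmin under hadoop-hdds/tools is not shown in this hunk. Conceptually, @MetaInfServices writes each implementation into META-INF/services, so a parent command can discover its children with a plain ServiceLoader; a rough, hypothetical sketch of that wiring (helper name and method are assumptions):

    import java.util.ServiceLoader;

    import org.apache.hadoop.hdds.cli.SubcommandWithParent;
    import picocli.CommandLine;

    /** Hypothetical helper showing service-loader based subcommand discovery. */
    public final class SubcommandLoader {

      private SubcommandLoader() {
      }

      /** Attach every registered subcommand whose declared parent matches. */
      public static void addDiscoveredSubcommands(CommandLine parent,
          Class<?> parentType) {
        for (SubcommandWithParent subcommand
            : ServiceLoader.load(SubcommandWithParent.class)) {
          if (parentType.equals(subcommand.getParentType())) {
            // picocli 4.x derives the subcommand name from @Command(name = ...).
            parent.addSubcommand(subcommand);
          }
        }
      }
    }
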
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/om/OMAdmin.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/om/OMAdmin.java
index ba5fe81..f9321ab 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/om/OMAdmin.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/om/OMAdmin.java
@@ -19,12 +19,12 @@ package org.apache.hadoop.ozone.admin.om;
 
 import org.apache.hadoop.hdds.cli.GenericCli;
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
+import org.apache.hadoop.hdds.cli.OzoneAdmin;
 import org.apache.hadoop.hdds.cli.SubcommandWithParent;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ozone.OmUtils;
-import org.apache.hadoop.ozone.admin.OzoneAdmin;
 import org.apache.hadoop.ozone.client.OzoneClientException;
 import org.apache.hadoop.ozone.client.OzoneClientFactory;
 import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
diff --git a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/genconf/TestGenerateOzoneRequiredConfigurations.java b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/genconf/TestGenerateOzoneRequiredConfigurations.java
index 9279d7f..e520190 100644
--- a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/genconf/TestGenerateOzoneRequiredConfigurations.java
+++ b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/genconf/TestGenerateOzoneRequiredConfigurations.java
@@ -143,7 +143,8 @@ public class TestGenerateOzoneRequiredConfigurations {
       cmd.parseWithHandlers(new CommandLine.RunLast(),
           exceptionHandler, args);
     }catch(Exception ex){
-      Assert.assertTrue(ex.getMessage().contains(msg));
+      Assert.assertTrue("Expected " + msg + ", but got: " + ex.getMessage(),
+          ex.getMessage().contains(msg));
     }
   }
 
@@ -225,7 +226,7 @@ public class TestGenerateOzoneRequiredConfigurations {
   public void genconfPathNotSpecified() throws Exception {
     File tempPath = getRandomTempDir();
     String[] args = new String[]{};
-    executeWithException(args, "Missing required parameter: <path>");
+    executeWithException(args, "Missing required parameter: '<path>'");
   }
 
   /**
diff --git a/pom.xml b/pom.xml
index f4b6414..b2984e9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -260,7 +260,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs
       <dependency>
         <groupId>info.picocli</groupId>
         <artifactId>picocli</artifactId>
-        <version>3.9.6</version>
+        <version>4.4.0</version>
       </dependency>
       <dependency>
         <groupId>jdiff</groupId>
@@ -1333,6 +1333,12 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xs
         <version>${hsqldb.version}</version>
       </dependency>
       <dependency>
+        <groupId>org.kohsuke.metainf-services</groupId>
+        <artifactId>metainf-services</artifactId>
+        <version>1.1</version>
+        <optional>true</optional>
+      </dependency>
+      <dependency>
         <groupId>io.dropwizard.metrics</groupId>
         <artifactId>metrics-core</artifactId>
         <version>3.2.4</version>

