Posted to commits@ozone.apache.org by sa...@apache.org on 2020/02/10 02:23:28 UTC

[hadoop-ozone] 10/18: HDDS-2650 Fix createPipeline CLI and make it message based. (#370)

This is an automated email from the ASF dual-hosted git repository.

sammichen pushed a commit to branch HDDS-1564
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git

commit 6cdfdcb4dc16cc28f6c4f419c455913fa316e965
Author: Li Cheng <bl...@gmail.com>
AuthorDate: Wed Dec 18 22:27:04 2019 +0800

    HDDS-2650 Fix createPipeline CLI and make it message based. (#370)
---
 ...inerLocationProtocolServerSideTranslatorPB.java | 26 ++++++++++++++++++++++
 .../scm/cli/pipeline/CreatePipelineSubcommand.java |  7 +++---
 2 files changed, 29 insertions(+), 4 deletions(-)

diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java
index 5ff75e7..f2e4253 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java
@@ -23,6 +23,7 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.PipelineResponseProto;
 import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ActivatePipelineRequestProto;
 import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ActivatePipelineResponseProto;
 import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ClosePipelineRequestProto;
@@ -72,6 +73,9 @@ import com.google.protobuf.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.PipelineResponseProto.Error.errorPipelineAlreadyExists;
+import static org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.PipelineResponseProto.Error.success;
+
 /**
  * This class is the server-side translator that forwards requests received on
  * {@link StorageContainerLocationProtocolPB} to the
@@ -160,6 +164,12 @@ public final class StorageContainerLocationProtocolServerSideTranslatorPB
             .setScmCloseContainerResponse(closeContainer(
                 request.getScmCloseContainerRequest()))
             .build();
+      case AllocatePipeline:
+        return ScmContainerLocationResponse.newBuilder()
+            .setCmdType(request.getCmdType())
+            .setStatus(Status.OK)
+            .setPipelineResponse(allocatePipeline(request.getPipelineRequest()))
+            .build();
       case ListPipelines:
         return ScmContainerLocationResponse.newBuilder()
             .setCmdType(request.getCmdType())
@@ -327,6 +337,22 @@ public final class StorageContainerLocationProtocolServerSideTranslatorPB
     return SCMCloseContainerResponseProto.newBuilder().build();
   }
 
+  public PipelineResponseProto allocatePipeline(
+      StorageContainerLocationProtocolProtos.PipelineRequestProto request)
+      throws IOException {
+    Pipeline pipeline = impl.createReplicationPipeline(
+        request.getReplicationType(), request.getReplicationFactor(),
+        HddsProtos.NodePool.getDefaultInstance());
+    if (pipeline == null) {
+      return PipelineResponseProto.newBuilder()
+          .setErrorCode(errorPipelineAlreadyExists).build();
+    }
+    PipelineResponseProto response = PipelineResponseProto.newBuilder()
+        .setErrorCode(success)
+        .setPipeline(pipeline.getProtobufMessage()).build();
+    return response;
+  }
+
   public ListPipelineResponseProto listPipelines(
       ListPipelineRequestProto request)
       throws IOException {
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java
index edeb786..58a1778 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/pipeline/CreatePipelineSubcommand.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdds.scm.cli.pipeline;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.hdds.scm.cli.SCMCLI;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import picocli.CommandLine;
 
@@ -30,13 +29,13 @@ import java.util.concurrent.Callable;
  * Handler of createPipeline command.
  */
 @CommandLine.Command(
-    name = "createPipeline",
+    name = "create",
     description = "create pipeline",
     mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
 public class CreatePipelineSubcommand implements Callable<Void> {
   @CommandLine.ParentCommand
-  private SCMCLI parent;
+  private PipelineCommands parent;
 
   @CommandLine.Option(
       names = {"-t", "--replicationType"},
@@ -60,7 +59,7 @@ public class CreatePipelineSubcommand implements Callable<Void> {
       throw new IllegalArgumentException(type.name()
           + " is not supported yet.");
     }
-    try (ScmClient scmClient = parent.createScmClient()) {
+    try (ScmClient scmClient = parent.getParent().createScmClient()) {
       scmClient.createReplicationPipeline(
           type,
           factor,

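The server-side change above routes AllocatePipeline through the same ScmContainerLocationRequest / ScmContainerLocationResponse envelope used by the other SCM commands, which is what makes the createPipeline path "message based". The sketch below is not part of the commit; it only illustrates how a caller might build such a request and inspect the reply. The ScmContainerLocationRequest builder, the Type.AllocatePipeline constant and the protobuf setters are assumed from the generated-code conventions implied by the getters visible in the diff.

  // Sketch only (not part of this commit): building and reading the
  // message-based AllocatePipeline request handled by the new translator case.
  import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
  import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos;
  import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.PipelineRequestProto;
  import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.PipelineResponseProto;
  import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ScmContainerLocationRequest;
  import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ScmContainerLocationResponse;

  public final class AllocatePipelineSketch {

    private AllocatePipelineSketch() {
    }

    // Wraps a PipelineRequestProto in the common request envelope, the way the
    // translator expects to receive it.
    public static ScmContainerLocationRequest buildRequest() {
      PipelineRequestProto pipelineRequest = PipelineRequestProto.newBuilder()
          .setReplicationType(HddsProtos.ReplicationType.RATIS)
          .setReplicationFactor(HddsProtos.ReplicationFactor.THREE)
          .build();
      return ScmContainerLocationRequest.newBuilder()
          .setCmdType(StorageContainerLocationProtocolProtos.Type.AllocatePipeline)
          .setPipelineRequest(pipelineRequest)
          // Other envelope fields (e.g. a trace ID), if the proto requires
          // them, are omitted in this sketch.
          .build();
    }

    // Mirrors the error codes that allocatePipeline() sets on the server side.
    public static void handleResponse(ScmContainerLocationResponse response) {
      PipelineResponseProto pipelineResponse = response.getPipelineResponse();
      if (pipelineResponse.getErrorCode()
          == PipelineResponseProto.Error.errorPipelineAlreadyExists) {
        System.out.println("Pipeline already exists; nothing was created.");
      } else {
        System.out.println("Created pipeline: "
            + pipelineResponse.getPipeline());
      }
    }
  }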

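On the CLI side, the subcommand is renamed from createPipeline to create and re-parented under PipelineCommands, so pipeline creation is now reached through the pipeline command group rather than the top-level SCMCLI. Assuming the usual scmcli entry point and a replication-factor option alongside the -t/--replicationType option shown in the hunk, the invocation would look roughly like: ozone scmcli pipeline create -t RATIS -f ONE.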