You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ozone.apache.org by so...@apache.org on 2021/05/04 11:08:20 UTC
[ozone] branch HDDS-3816-ec updated: HDDS-5145. Extend
Pipeline/ReplicationConfig refactor with ECReplicationConfig (#2206)
This is an automated email from the ASF dual-hosted git repository.
sodonnell pushed a commit to branch HDDS-3816-ec
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/HDDS-3816-ec by this push:
new 1d5c593 HDDS-5145. Extend Pipeline/ReplicationConfig refactor with ECReplicationConfig (#2206)
1d5c593 is described below
commit 1d5c593319fcc520760d11034f7275ff62d158eb
Author: Elek, Márton <el...@users.noreply.github.com>
AuthorDate: Tue May 4 13:07:58 2021 +0200
HDDS-5145. Extend Pipeline/ReplicationConfig refactor with ECReplicationConfig (#2206)
---
.../hadoop/hdds/client/ECReplicationConfig.java | 1 +
.../apache/hadoop/hdds/scm/pipeline/Pipeline.java | 83 ++++++++++++----------
.../hadoop/hdds/scm/pipeline/MockPipeline.java | 19 +++++
.../hadoop/hdds/scm/pipeline/TestPipeline.java | 20 ++++++
.../interface-client/src/main/proto/hdds.proto | 1 +
...lockLocationProtocolServerSideTranslatorPB.java | 3 +-
6 files changed, 88 insertions(+), 39 deletions(-)
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java
index 1d9da50..5b7d033 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java
@@ -91,4 +91,5 @@ public class ECReplicationConfig implements ReplicationConfig {
public int hashCode() {
return Objects.hash(data, parity);
}
+
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
index 90a6d94..c440b64 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
@@ -34,6 +34,7 @@ import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
@@ -278,6 +279,44 @@ public final class Pipeline {
return replicationConfig;
}
+ public static Pipeline getFromProtobuf(HddsProtos.Pipeline pipeline)
+ throws UnknownPipelineStateException {
+ Preconditions.checkNotNull(pipeline, "Pipeline is null");
+
+ List<DatanodeDetails> nodes = new ArrayList<>();
+ for (DatanodeDetailsProto member : pipeline.getMembersList()) {
+ nodes.add(DatanodeDetails.getFromProtoBuf(member));
+ }
+ UUID leaderId = null;
+ if (pipeline.hasLeaderID128()) {
+ HddsProtos.UUID uuid = pipeline.getLeaderID128();
+ leaderId = new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits());
+ } else if (pipeline.hasLeaderID() &&
+ StringUtils.isNotEmpty(pipeline.getLeaderID())) {
+ leaderId = UUID.fromString(pipeline.getLeaderID());
+ }
+
+ UUID suggestedLeaderId = null;
+ if (pipeline.hasSuggestedLeaderID()) {
+ HddsProtos.UUID uuid = pipeline.getSuggestedLeaderID();
+ suggestedLeaderId =
+ new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits());
+ }
+
+ final ReplicationConfig config = ReplicationConfig
+ .fromProto(pipeline.getType(), pipeline.getFactor(),
+ pipeline.getEcReplicationConfig());
+ return new Builder().setId(PipelineID.getFromProtobuf(pipeline.getId()))
+ .setReplicationConfig(config)
+ .setState(PipelineState.fromProtobuf(pipeline.getState()))
+ .setNodes(nodes)
+ .setLeaderId(leaderId)
+ .setSuggestedLeaderId(suggestedLeaderId)
+ .setNodesInOrder(pipeline.getMemberOrdersList())
+ .setCreateTimestamp(pipeline.getCreationTimeStamp())
+ .build();
+ }
+
public HddsProtos.Pipeline getProtobufMessage(int clientVersion)
throws UnknownPipelineStateException {
@@ -292,13 +331,18 @@ public final class Pipeline {
HddsProtos.Pipeline.Builder builder = HddsProtos.Pipeline.newBuilder()
.setId(id.getProtobuf())
.setType(replicationConfig.getReplicationType())
- .setFactor(ReplicationConfig.getLegacyFactor(replicationConfig))
.setState(PipelineState.getProtobuf(state))
.setLeaderID(leaderId != null ? leaderId.toString() : "")
.setCreationTimeStamp(creationTimestamp.toEpochMilli())
.addAllMembers(members)
.addAllMemberReplicaIndexes(memberReplicaIndexes);
+ if (replicationConfig instanceof ECReplicationConfig) {
+ builder.setEcReplicationConfig(((ECReplicationConfig) replicationConfig)
+ .toProto());
+ } else {
+ builder.setFactor(ReplicationConfig.getLegacyFactor(replicationConfig));
+ }
if (leaderId != null) {
HddsProtos.UUID uuid128 = HddsProtos.UUID.newBuilder()
.setMostSigBits(leaderId.getMostSignificantBits())
@@ -335,43 +379,6 @@ public final class Pipeline {
return builder.build();
}
- public static Pipeline getFromProtobuf(HddsProtos.Pipeline pipeline)
- throws UnknownPipelineStateException {
- Preconditions.checkNotNull(pipeline, "Pipeline is null");
-
- List<DatanodeDetails> nodes = new ArrayList<>();
- for (DatanodeDetailsProto member : pipeline.getMembersList()) {
- nodes.add(DatanodeDetails.getFromProtoBuf(member));
- }
- UUID leaderId = null;
- if (pipeline.hasLeaderID128()) {
- HddsProtos.UUID uuid = pipeline.getLeaderID128();
- leaderId = new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits());
- } else if (pipeline.hasLeaderID() &&
- StringUtils.isNotEmpty(pipeline.getLeaderID())) {
- leaderId = UUID.fromString(pipeline.getLeaderID());
- }
-
- UUID suggestedLeaderId = null;
- if (pipeline.hasSuggestedLeaderID()) {
- HddsProtos.UUID uuid = pipeline.getSuggestedLeaderID();
- suggestedLeaderId =
- new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits());
- }
-
- final ReplicationConfig config = ReplicationConfig
- .fromProto(pipeline.getType(), pipeline.getFactor());
- return new Builder().setId(PipelineID.getFromProtobuf(pipeline.getId()))
- .setReplicationConfig(config)
- .setState(PipelineState.fromProtobuf(pipeline.getState()))
- .setNodes(nodes)
- .setLeaderId(leaderId)
- .setSuggestedLeaderId(suggestedLeaderId)
- .setNodesInOrder(pipeline.getMemberOrdersList())
- .setCreateTimestamp(pipeline.getCreationTimeStamp())
- .build();
- }
-
@Override
public boolean equals(Object o) {
if (this == o) {
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/MockPipeline.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/MockPipeline.java
index b7b3dc6..1212b47 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/MockPipeline.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/MockPipeline.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
+import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.hadoop.hdds.client.RatisReplicationConfig;
import org.apache.hadoop.hdds.client.StandaloneReplicationConfig;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
@@ -87,6 +88,24 @@ public final class MockPipeline {
.build();
}
+ public static Pipeline createEcPipeline() {
+
+ List<DatanodeDetails> nodes = new ArrayList<>();
+ nodes.add(MockDatanodeDetails.randomDatanodeDetails());
+ nodes.add(MockDatanodeDetails.randomDatanodeDetails());
+ nodes.add(MockDatanodeDetails.randomDatanodeDetails());
+ nodes.add(MockDatanodeDetails.randomDatanodeDetails());
+ nodes.add(MockDatanodeDetails.randomDatanodeDetails());
+
+ return Pipeline.newBuilder()
+ .setState(Pipeline.PipelineState.OPEN)
+ .setId(PipelineID.randomId())
+ .setReplicationConfig(
+ new ECReplicationConfig(3, 2))
+ .setNodes(nodes)
+ .build();
+ }
+
private MockPipeline() {
throw new UnsupportedOperationException("no instances");
}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java
index 504f949..2cf3bf0 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hdds.scm.pipeline;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
@@ -48,4 +49,23 @@ public class TestPipeline {
assertPorts(dn, ALL_PORTS);
}
}
+
+ @Test
+ public void getProtobufMessageEC() throws IOException {
+ Pipeline subject = MockPipeline.createPipeline(3);
+
+ //when EC config is empty/null
+ HddsProtos.Pipeline protobufMessage = subject.getProtobufMessage(1);
+ Assert.assertEquals(0, protobufMessage.getEcReplicationConfig().getData());
+
+
+ //when EC config is NOT empty
+ subject = MockPipeline.createEcPipeline();
+
+ protobufMessage = subject.getProtobufMessage(1);
+ Assert.assertEquals(3, protobufMessage.getEcReplicationConfig().getData());
+ Assert
+ .assertEquals(2, protobufMessage.getEcReplicationConfig().getParity());
+
+ }
}
diff --git a/hadoop-hdds/interface-client/src/main/proto/hdds.proto b/hadoop-hdds/interface-client/src/main/proto/hdds.proto
index dbbb284..ba8f916 100644
--- a/hadoop-hdds/interface-client/src/main/proto/hdds.proto
+++ b/hadoop-hdds/interface-client/src/main/proto/hdds.proto
@@ -114,6 +114,7 @@ message Pipeline {
optional uint64 creationTimeStamp = 8;
optional UUID suggestedLeaderID = 9;
repeated uint32 memberReplicaIndexes = 10;
+ optional ECReplicationConfig ecReplicationConfig = 11;
// TODO(runzhiwang): when leaderID is gone, specify 6 as the index of leaderID128
optional UUID leaderID128 = 100;
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocolServerSideTranslatorPB.java
index 1938afe..e95fc07 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocolServerSideTranslatorPB.java
@@ -181,7 +181,8 @@ public final class ScmBlockLocationProtocolServerSideTranslatorPB
request.getNumBlocks(),
ReplicationConfig.fromProto(
request.getType(),
- request.getFactor()),
+ request.getFactor(),
+ request.getEcReplicationConfig()),
request.getOwner(),
ExcludeList.getFromProtoBuf(request.getExcludeList()));
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@ozone.apache.org
For additional commands, e-mail: commits-help@ozone.apache.org