You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by xk...@apache.org on 2018/08/31 16:10:53 UTC
[27/47] hadoop git commit: HDDS-382. Remove
RatisTestHelper#RatisTestSuite constructor argument and fix checkstyle in
ContainerTestHelper, GenericTestUtils Contributed by Nandakumar.
HDDS-382. Remove RatisTestHelper#RatisTestSuite constructor argument and fix checkstyle in ContainerTestHelper, GenericTestUtils.
Contributed by Nandakumar.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c5629d54
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c5629d54
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c5629d54
Branch: refs/heads/HDFS-12943
Commit: c5629d546d64091a14560df488a7f797a150337e
Parents: 33f42ef
Author: Anu Engineer <ae...@apache.org>
Authored: Tue Aug 28 14:06:19 2018 -0700
Committer: Anu Engineer <ae...@apache.org>
Committed: Tue Aug 28 14:06:19 2018 -0700
----------------------------------------------------------------------
.../apache/hadoop/hdds/scm/XceiverClient.java | 6 +--
.../hadoop/hdds/scm/XceiverClientGrpc.java | 6 +--
.../hadoop/hdds/scm/XceiverClientManager.java | 2 +-
.../hdds/scm/storage/ChunkInputStream.java | 7 +--
.../hdds/scm/storage/ChunkOutputStream.java | 4 +-
.../org/apache/hadoop/hdds/client/BlockID.java | 5 +-
.../hadoop/hdds/scm/XceiverClientSpi.java | 2 -
.../common/helpers/AllocatedBlock.java | 4 +-
.../container/common/helpers/ContainerInfo.java | 12 ++---
.../common/helpers/ContainerWithPipeline.java | 7 +--
.../scm/container/common/helpers/Pipeline.java | 11 ++---
.../StorageContainerLocationProtocol.java | 6 ++-
...rLocationProtocolClientSideTranslatorPB.java | 21 ++++----
.../scm/storage/ContainerProtocolCalls.java | 6 +--
.../org/apache/hadoop/ozone/OzoneConsts.java | 5 --
.../ozone/container/common/helpers/KeyData.java | 8 ++--
.../apache/hadoop/utils/HddsVersionInfo.java | 6 ++-
.../apache/hadoop/utils/TestMetadataStore.java | 1 -
.../hadoop/ozone/HddsDatanodeService.java | 3 +-
.../common/helpers/ContainerUtils.java | 22 ++++-----
.../container/common/impl/ContainerSet.java | 2 +-
.../common/impl/OpenContainerBlockMap.java | 19 ++++----
.../server/ratis/XceiverServerRatis.java | 6 +--
.../keyvalue/interfaces/KeyManager.java | 4 +-
.../ozone/protocol/commands/CommandStatus.java | 16 +++----
.../ozone/container/common/ScmTestMock.java | 6 ++-
.../common/interfaces/TestHandler.java | 7 ---
.../endpoint/TestHeartbeatEndpointTask.java | 2 -
.../TestRoundRobinVolumeChoosingPolicy.java | 5 +-
.../container/ozoneimpl/TestOzoneContainer.java | 3 +-
.../hadoop/hdds/server/events/EventWatcher.java | 6 ++-
.../hdds/server/events/TestEventQueue.java | 3 --
.../hadoop/hdds/scm/block/BlockManagerImpl.java | 18 +++----
.../hdds/scm/block/DeletedBlockLogImpl.java | 3 +-
.../hdds/scm/block/SCMBlockDeletingService.java | 4 +-
.../container/CloseContainerEventHandler.java | 4 +-
.../hdds/scm/container/ContainerMapping.java | 4 +-
.../scm/container/ContainerStateManager.java | 7 +--
.../replication/ReplicationManager.java | 2 +-
.../scm/container/states/ContainerStateMap.java | 2 +-
.../hdds/scm/node/states/Node2ContainerMap.java | 4 +-
.../scm/node/states/NodeNotFoundException.java | 2 -
.../hdds/scm/node/states/ReportResult.java | 3 +-
.../hdds/scm/pipelines/Node2PipelineMap.java | 50 +++++++++-----------
.../hdds/scm/pipelines/PipelineManager.java | 6 +--
.../hdds/scm/pipelines/PipelineSelector.java | 7 +--
.../scm/server/SCMClientProtocolServer.java | 3 +-
.../org/apache/hadoop/hdds/scm/TestUtils.java | 8 ++--
.../hadoop/hdds/scm/block/TestBlockManager.java | 1 -
.../hdds/scm/block/TestDeletedBlockLog.java | 7 +--
.../command/TestCommandStatusReportHandler.java | 22 ++++-----
.../TestCloseContainerEventHandler.java | 1 -
.../scm/container/TestContainerMapping.java | 7 +--
.../container/TestContainerReportHandler.java | 2 +-
.../TestSCMContainerPlacementCapacity.java | 8 ++--
.../TestSCMContainerPlacementRandom.java | 4 +-
.../replication/TestReplicationManager.java | 11 ++---
.../replication/TestReplicationQueue.java | 4 +-
.../hdds/scm/node/TestContainerPlacement.java | 5 +-
.../hadoop/hdds/scm/node/TestNodeManager.java | 3 +-
.../hdds/scm/node/TestNodeReportHandler.java | 3 +-
.../ozone/container/common/TestEndPoint.java | 9 ++--
.../placement/TestContainerPlacement.java | 6 ++-
.../apache/hadoop/ozone/client/ObjectStore.java | 7 ++-
.../hdds/scm/pipeline/TestPipelineClose.java | 4 --
.../apache/hadoop/ozone/RatisTestHelper.java | 8 ++--
.../TestStorageContainerManagerHelper.java | 2 -
.../rpc/TestCloseContainerHandlingByClient.java | 3 +-
.../ozone/container/ContainerTestHelper.java | 2 -
.../common/impl/TestContainerPersistence.java | 1 -
.../ozoneimpl/TestOzoneContainerRatis.java | 3 +-
.../container/ozoneimpl/TestRatisManager.java | 4 +-
.../hadoop/ozone/scm/TestAllocateContainer.java | 2 -
.../hadoop/ozone/web/TestOzoneWebAccess.java | 1 -
.../hadoop/ozone/web/client/TestBuckets.java | 5 +-
.../ozone/web/client/TestBucketsRatis.java | 2 +-
.../hadoop/ozone/web/client/TestKeysRatis.java | 2 +-
.../apache/hadoop/ozone/om/KeyManagerImpl.java | 13 -----
.../apache/hadoop/fs/ozone/OzoneFileSystem.java | 2 -
.../org/apache/hadoop/ozone/scm/cli/SQLCLI.java | 4 --
80 files changed, 226 insertions(+), 282 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java
index 097af17..5022618 100644
--- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java
@@ -38,7 +38,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
-import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
@@ -188,13 +187,12 @@ public class XceiverClient extends XceiverClientSpi {
/**
* Create a pipeline.
*
- * @param pipeline - pipeline to be created.
+ * @param ignored - pipeline to be created.
*/
@Override
- public void createPipeline(Pipeline pipeline)
+ public void createPipeline(Pipeline ignored)
throws IOException {
// For stand alone pipeline, there is no notion called setup pipeline.
- return;
}
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
index 35bc932..e2416c2 100644
--- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
@@ -38,7 +38,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
-import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
@@ -218,13 +217,12 @@ public class XceiverClientGrpc extends XceiverClientSpi {
/**
* Create a pipeline.
*
- * @param pipeline - pipeline to be created.
+ * @param ignored - pipeline to be created.
*/
@Override
- public void createPipeline(Pipeline pipeline)
+ public void createPipeline(Pipeline ignored)
throws IOException {
// For stand alone pipeline, there is no notion called setup pipeline.
- return;
}
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java
index 125e5d5..9762406 100644
--- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java
@@ -154,7 +154,7 @@ public class XceiverClientManager implements Closeable {
break;
case CHAINED:
default:
- throw new IOException ("not implemented" + pipeline.getType());
+ throw new IOException("not implemented" + pipeline.getType());
}
client.connect();
return client;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
index 020c684..a969b68 100644
--- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
@@ -65,7 +65,8 @@ public class ChunkInputStream extends InputStream implements Seekable {
* @param chunks list of chunks to read
* @param traceID container protocol call traceID
*/
- public ChunkInputStream(BlockID blockID, XceiverClientManager xceiverClientManager,
+ public ChunkInputStream(
+ BlockID blockID, XceiverClientManager xceiverClientManager,
XceiverClientSpi xceiverClient, List<ChunkInfo> chunks, String traceID) {
this.blockID = blockID;
this.traceID = traceID;
@@ -211,8 +212,8 @@ public class ChunkInputStream extends InputStream implements Seekable {
if (pos < 0 || (chunks.size() == 0 && pos > 0)
|| pos >= chunkOffset[chunks.size() - 1] + chunks.get(chunks.size() - 1)
.getLen()) {
- throw new EOFException(
- "EOF encountered pos: " + pos + " container key: " + blockID.getLocalID());
+ throw new EOFException("EOF encountered pos: " + pos + " container key: "
+ + blockID.getLocalID());
}
if (chunkIndex == -1) {
chunkIndex = Arrays.binarySearch(chunkOffset, pos);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java
index 7309434..f2df3fa 100644
--- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java
@@ -76,8 +76,8 @@ public class ChunkOutputStream extends OutputStream {
* @param chunkSize chunk size
*/
public ChunkOutputStream(BlockID blockID, String key,
- XceiverClientManager xceiverClientManager, XceiverClientSpi xceiverClient,
- String traceID, int chunkSize) {
+ XceiverClientManager xceiverClientManager,
+ XceiverClientSpi xceiverClient, String traceID, int chunkSize) {
this.blockID = blockID;
this.key = key;
this.traceID = traceID;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/BlockID.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/BlockID.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/BlockID.java
index 74e90e9..8149740 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/BlockID.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/BlockID.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import java.util.Objects;
/**
- * BlockID of ozone (containerID localID)
+ * BlockID of ozone (containerID localID).
*/
public class BlockID {
private long containerID;
@@ -65,7 +65,8 @@ public class BlockID {
setContainerID(containerID).setLocalID(localID).build();
}
- public static BlockID getFromProtobuf(ContainerProtos.DatanodeBlockID blockID) {
+ public static BlockID getFromProtobuf(
+ ContainerProtos.DatanodeBlockID blockID) {
return new BlockID(blockID.getContainerID(),
blockID.getLocalID());
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java
index b29e73d..b3b0da2 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdds.scm;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
@@ -29,7 +28,6 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import java.io.Closeable;
import java.io.IOException;
-import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
index 9b89469..63781a8 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
@@ -43,8 +43,8 @@ public final class AllocatedBlock {
return this;
}
- public Builder setBlockID(BlockID blockID) {
- this.blockID = blockID;
+ public Builder setBlockID(BlockID blockId) {
+ this.blockID = blockId;
return this;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
index 311c118..465f4b9 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
@@ -396,13 +396,13 @@ public class ContainerInfo implements Comparator<ContainerInfo>,
private ReplicationType replicationType;
public Builder setReplicationType(
- ReplicationType replicationType) {
- this.replicationType = replicationType;
+ ReplicationType repType) {
+ this.replicationType = repType;
return this;
}
- public Builder setPipelineID(PipelineID pipelineID) {
- this.pipelineID = pipelineID;
+ public Builder setPipelineID(PipelineID pipelineId) {
+ this.pipelineID = pipelineId;
return this;
}
@@ -447,8 +447,8 @@ public class ContainerInfo implements Comparator<ContainerInfo>,
return this;
}
- public Builder setDeleteTransactionId(long deleteTransactionId) {
- this.deleteTransactionId = deleteTransactionId;
+ public Builder setDeleteTransactionId(long deleteTransactionID) {
+ this.deleteTransactionId = deleteTransactionID;
return this;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
index e71d429..64f42b3 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerWithPipeline.java
@@ -26,8 +26,8 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
/**
* Class wraps ozone container info.
*/
-public class ContainerWithPipeline
- implements Comparator<ContainerWithPipeline>, Comparable<ContainerWithPipeline> {
+public class ContainerWithPipeline implements Comparator<ContainerWithPipeline>,
+ Comparable<ContainerWithPipeline> {
private final ContainerInfo containerInfo;
private final Pipeline pipeline;
@@ -45,7 +45,8 @@ public class ContainerWithPipeline
return pipeline;
}
- public static ContainerWithPipeline fromProtobuf(HddsProtos.ContainerWithPipeline allocatedContainer) {
+ public static ContainerWithPipeline fromProtobuf(
+ HddsProtos.ContainerWithPipeline allocatedContainer) {
return new ContainerWithPipeline(
ContainerInfo.fromProtobuf(allocatedContainer.getContainerInfo()),
Pipeline.getFromProtoBuf(allocatedContainer.getPipeline()));
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
index 9270468..6757262 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
@@ -169,8 +169,8 @@ public class Pipeline {
*/
public List<String> getDatanodeHosts() {
List<String> dataHosts = new ArrayList<>();
- for (DatanodeDetails id :getDatanodes().values()) {
- dataHosts.add(id.getHostName());
+ for (DatanodeDetails datanode : getDatanodes().values()) {
+ dataHosts.add(datanode.getHostName());
}
return dataHosts;
}
@@ -219,7 +219,7 @@ public class Pipeline {
* Update the State of the pipeline.
*/
public void setLifeCycleState(HddsProtos.LifeCycleState nextState) {
- lifeCycleState = nextState;
+ lifeCycleState = nextState;
}
/**
@@ -244,9 +244,8 @@ public class Pipeline {
public String toString() {
final StringBuilder b = new StringBuilder(getClass().getSimpleName())
.append("[");
- getDatanodes().keySet().stream()
- .forEach(id -> b.
- append(id.endsWith(getLeaderID()) ? "*" + id : id));
+ getDatanodes().keySet().forEach(
+ node -> b.append(node.endsWith(getLeaderID()) ? "*" + node : node));
b.append(" id:").append(id);
if (getType() != null) {
b.append(" type:").append(getType().toString());
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
index 581fbd0..c55062b 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
@@ -38,7 +38,8 @@ public interface StorageContainerLocationProtocol {
* set of datanodes that should be used creating this container.
*
*/
- ContainerWithPipeline allocateContainer(HddsProtos.ReplicationType replicationType,
+ ContainerWithPipeline allocateContainer(
+ HddsProtos.ReplicationType replicationType,
HddsProtos.ReplicationFactor factor, String owner)
throws IOException;
@@ -61,7 +62,8 @@ public interface StorageContainerLocationProtocol {
* @return ContainerWithPipeline - the container info with the pipeline.
* @throws IOException
*/
- ContainerWithPipeline getContainerWithPipeline(long containerID) throws IOException;
+ ContainerWithPipeline getContainerWithPipeline(long containerID)
+ throws IOException;
/**
* Ask SCM a list of containers with a range of container names
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
index ac12ea2..0441469 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
@@ -97,8 +97,9 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
* @throws IOException
*/
@Override
- public ContainerWithPipeline allocateContainer(HddsProtos.ReplicationType type,
- HddsProtos.ReplicationFactor factor, String owner) throws IOException {
+ public ContainerWithPipeline allocateContainer(
+ HddsProtos.ReplicationType type, HddsProtos.ReplicationFactor factor,
+ String owner) throws IOException {
ContainerRequestProto request = ContainerRequestProto.newBuilder()
.setReplicationFactor(factor)
@@ -116,7 +117,8 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
throw new IOException(response.hasErrorMessage() ?
response.getErrorMessage() : "Allocate container failed.");
}
- return ContainerWithPipeline.fromProtobuf(response.getContainerWithPipeline());
+ return ContainerWithPipeline.fromProtobuf(
+ response.getContainerWithPipeline());
}
public ContainerInfo getContainer(long containerID) throws IOException {
@@ -138,17 +140,18 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
/**
* {@inheritDoc}
*/
- public ContainerWithPipeline getContainerWithPipeline(long containerID) throws IOException {
+ public ContainerWithPipeline getContainerWithPipeline(long containerID)
+ throws IOException {
Preconditions.checkState(containerID >= 0,
"Container ID cannot be negative");
- GetContainerWithPipelineRequestProto request = GetContainerWithPipelineRequestProto
- .newBuilder()
- .setContainerID(containerID)
- .build();
+ GetContainerWithPipelineRequestProto request =
+ GetContainerWithPipelineRequestProto.newBuilder()
+ .setContainerID(containerID).build();
try {
GetContainerWithPipelineResponseProto response =
rpcProxy.getContainerWithPipeline(NULL_RPC_CONTROLLER, request);
- return ContainerWithPipeline.fromProtobuf(response.getContainerWithPipeline());
+ return ContainerWithPipeline.fromProtobuf(
+ response.getContainerWithPipeline());
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java
index abad9e3..1f2fafb 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java
@@ -113,8 +113,8 @@ public final class ContainerProtocolCalls {
* @throws IOException if there is an I/O error while performing the call
*/
public static ContainerProtos.GetCommittedBlockLengthResponseProto
- getCommittedBlockLength(
- XceiverClientSpi xceiverClient, BlockID blockID, String traceID)
+ getCommittedBlockLength(
+ XceiverClientSpi xceiverClient, BlockID blockID, String traceID)
throws IOException {
ContainerProtos.GetCommittedBlockLengthRequestProto.Builder
getBlockLengthRequestBuilder =
@@ -375,7 +375,7 @@ public final class ContainerProtocolCalls {
}
/**
- * Reads the data given the blockID
+ * Reads the data given the blockID.
*
* @param client
* @param blockID - ID of the block
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
index ab6df92..15366fb 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
@@ -110,11 +110,6 @@ public final class OzoneConsts {
}
}
- /**
- * Ozone handler types.
- */
- public static final String OZONE_HANDLER_DISTRIBUTED = "distributed";
-
public static final String DELETING_KEY_PREFIX = "#deleting#";
public static final String DELETED_KEY_PREFIX = "#deleted#";
public static final String DELETE_TRANSACTION_KEY_PREFIX = "#delTX#";
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java
index 84a6f71..ee27021 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyData.java
@@ -37,7 +37,8 @@ public class KeyData {
/**
* Represent a list of chunks.
- * In order to reduce memory usage, chunkList is declared as an {@link Object}.
+ * In order to reduce memory usage, chunkList is declared as an
+ * {@link Object}.
* When #elements == 0, chunkList is null.
* When #elements == 1, chunkList refers to the only element.
* When #elements > 1, chunkList refers to the list.
@@ -157,7 +158,7 @@ public class KeyData {
}
/**
- * Adds chinkInfo to the list
+ * Adds chunkInfo to the list.
*/
public void addChunk(ContainerProtos.ChunkInfo chunkInfo) {
if (chunkList == null) {
@@ -237,7 +238,8 @@ public class KeyData {
} else {
final int n = chunks.size();
chunkList = n == 0? null: n == 1? chunks.get(0): chunks;
- size = chunks.parallelStream().mapToLong(ContainerProtos.ChunkInfo::getLen).sum();
+ size = chunks.parallelStream().mapToLong(
+ ContainerProtos.ChunkInfo::getLen).sum();
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/HddsVersionInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/HddsVersionInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/HddsVersionInfo.java
index 59b9de6..e7f697a 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/HddsVersionInfo.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/HddsVersionInfo.java
@@ -36,7 +36,8 @@ import java.util.Properties;
@InterfaceAudience.Public
@InterfaceStability.Stable
public class HddsVersionInfo {
- private static final Logger LOG = LoggerFactory.getLogger(HddsVersionInfo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(
+ HddsVersionInfo.class);
private Properties info;
@@ -95,7 +96,8 @@ public class HddsVersionInfo {
return info.getProperty("protocVersion", "Unknown");
}
- private static HddsVersionInfo HDDS_VERSION_INFO = new HddsVersionInfo("hdds");
+ private static final HddsVersionInfo HDDS_VERSION_INFO =
+ new HddsVersionInfo("hdds");
/**
* Get the HDDS version.
* @return the Hdds version string, eg. "0.6.3-dev"
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestMetadataStore.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestMetadataStore.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestMetadataStore.java
index 1bce022..30fc7f3 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestMetadataStore.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestMetadataStore.java
@@ -55,7 +55,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
index f359e72..348196c 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/HddsDatanodeService.java
@@ -232,7 +232,8 @@ public class HddsDatanodeService implements ServicePlugin {
public static void main(String[] args) {
try {
- if (DFSUtil.parseHelpArgument(args, "Starts HDDS Datanode", System.out, false)) {
+ if (DFSUtil.parseHelpArgument(
+ args, "Starts HDDS Datanode", System.out, false)) {
System.exit(0);
}
Configuration conf = new OzoneConfiguration();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
index 469c969..d96849e 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -43,7 +43,6 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.nio.file.Path;
import java.nio.file.Paths;
import org.yaml.snakeyaml.Yaml;
@@ -54,8 +53,6 @@ import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.Result.NO_SUCH_ALGORITHM;
import static org.apache.hadoop.ozone.container.common.impl.ContainerData
.CHARSET_ENCODING;
-import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_EXTENSION;
-
/**
* A set of helper functions to create proper responses.
@@ -75,14 +72,13 @@ public final class ContainerUtils {
* @return ContainerCommand Response Builder.
*/
public static ContainerCommandResponseProto.Builder
- getContainerCommandResponse(
- ContainerCommandRequestProto request, Result result, String message) {
- return
- ContainerCommandResponseProto.newBuilder()
- .setCmdType(request.getCmdType())
- .setTraceID(request.getTraceID())
- .setResult(result)
- .setMessage(message);
+ getContainerCommandResponse(
+ ContainerCommandRequestProto request, Result result, String message) {
+ return ContainerCommandResponseProto.newBuilder()
+ .setCmdType(request.getCmdType())
+ .setTraceID(request.getTraceID())
+ .setResult(result)
+ .setMessage(message);
}
/**
@@ -287,7 +283,7 @@ public final class ContainerUtils {
}
/**
- * Get the .container file from the containerBaseDir
+ * Get the .container file from the containerBaseDir.
* @param containerBaseDir container base directory. The name of this
* directory is same as the containerID
* @return the .container file
@@ -301,7 +297,7 @@ public final class ContainerUtils {
}
/**
- * ContainerID can be decoded from the container base directory name
+ * ContainerID can be decoded from the container base directory name.
*/
public static long getContainerID(File containerBaseDir) {
return Long.parseLong(containerBaseDir.getName());
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java
index f92ab52..7f4f147 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java
@@ -132,7 +132,7 @@ public class ContainerSet {
}
/**
- * Return a copy of the containerMap
+ * Return a copy of the containerMap.
* @return containerMap
*/
public Map<Long, Container> getContainerMap() {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/OpenContainerBlockMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/OpenContainerBlockMap.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/OpenContainerBlockMap.java
index ab7789b..1ef3d0d 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/OpenContainerBlockMap.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/OpenContainerBlockMap.java
@@ -45,10 +45,12 @@ public class OpenContainerBlockMap {
/**
* Map: localId -> KeyData.
*
- * In order to support {@link #getAll()}, the update operations are synchronized.
+ * In order to support {@link #getAll()}, the update operations are
+ * synchronized.
*/
static class KeyDataMap {
- private final ConcurrentMap<Long, KeyData> blocks = new ConcurrentHashMap<>();
+ private final ConcurrentMap<Long, KeyData> blocks =
+ new ConcurrentHashMap<>();
KeyData get(long localId) {
return blocks.get(localId);
@@ -59,7 +61,8 @@ public class OpenContainerBlockMap {
return blocks.size();
}
- synchronized KeyData computeIfAbsent(long localId, Function<Long, KeyData> f) {
+ synchronized KeyData computeIfAbsent(
+ long localId, Function<Long, KeyData> f) {
return blocks.computeIfAbsent(localId, f);
}
@@ -76,7 +79,8 @@ public class OpenContainerBlockMap {
*
* For now, we will track all open blocks of a container in the blockMap.
*/
- private final ConcurrentMap<Long, KeyDataMap> containers = new ConcurrentHashMap<>();
+ private final ConcurrentMap<Long, KeyDataMap> containers =
+ new ConcurrentHashMap<>();
/**
* Removes the Container matching with specified containerId.
@@ -109,7 +113,7 @@ public class OpenContainerBlockMap {
}
/**
- * returns the list of open to the openContainerBlockMap
+ * Returns the list of open keys (blocks) in the openContainerBlockMap.
* @param containerId container id
* @return List of open Keys(blocks)
*/
@@ -130,15 +134,14 @@ public class OpenContainerBlockMap {
}
/**
- * Returns true if the block exists in the map, false otherwise
+ * Returns true if the block exists in the map, false otherwise.
*
* @param blockID
* @return True, if it exists, false otherwise
*/
public boolean checkIfBlockExists(BlockID blockID) {
KeyDataMap keyDataMap = containers.get(blockID.getContainerID());
- return keyDataMap == null ? false :
- keyDataMap.get(blockID.getLocalID()) != null;
+ return keyDataMap != null && keyDataMap.get(blockID.getLocalID()) != null;
}
@VisibleForTesting
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
index f8c7af2..8256722 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java
@@ -71,10 +71,10 @@ import java.util.concurrent.atomic.AtomicLong;
*/
public final class XceiverServerRatis implements XceiverServerSpi {
static final Logger LOG = LoggerFactory.getLogger(XceiverServerRatis.class);
- private static final AtomicLong callIdCounter = new AtomicLong();
+ private static final AtomicLong CALL_ID_COUNTER = new AtomicLong();
private static long nextCallId() {
- return callIdCounter.getAndIncrement() & Long.MAX_VALUE;
+ return CALL_ID_COUNTER.getAndIncrement() & Long.MAX_VALUE;
}
private final int port;
@@ -307,6 +307,6 @@ public final class XceiverServerRatis implements XceiverServerSpi {
RaftClientRequest.Type type) {
return new RaftClientRequest(clientId, server.getId(),
PipelineID.getFromProtobuf(pipelineID).getRaftGroupID(),
- nextCallId(),0, Message.valueOf(request.toByteString()), type);
+ nextCallId(), 0, Message.valueOf(request.toByteString()), type);
}
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/interfaces/KeyManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/interfaces/KeyManager.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/interfaces/KeyManager.java
index 37871be..84f771a 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/interfaces/KeyManager.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/interfaces/KeyManager.java
@@ -67,8 +67,8 @@ public interface KeyManager {
* @param count - Number of keys to return.
* @return List of Keys that match the criteria.
*/
- List<KeyData> listKey(Container container, long startLocalID, int count) throws
- IOException;
+ List<KeyData> listKey(Container container, long startLocalID, int count)
+ throws IOException;
/**
* Returns the last committed block length for the block.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CommandStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CommandStatus.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CommandStatus.java
index bf99700..32cf7c2 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CommandStatus.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CommandStatus.java
@@ -109,23 +109,23 @@ public class CommandStatus {
return new CommandStatusBuilder();
}
- public CommandStatusBuilder setType(Type type) {
- this.type = type;
+ public CommandStatusBuilder setType(Type commandType) {
+ this.type = commandType;
return this;
}
- public CommandStatusBuilder setCmdId(Long cmdId) {
- this.cmdId = cmdId;
+ public CommandStatusBuilder setCmdId(Long commandId) {
+ this.cmdId = commandId;
return this;
}
- public CommandStatusBuilder setStatus(Status status) {
- this.status = status;
+ public CommandStatusBuilder setStatus(Status commandStatus) {
+ this.status = commandStatus;
return this;
}
- public CommandStatusBuilder setMsg(String msg) {
- this.msg = msg;
+ public CommandStatusBuilder setMsg(String message) {
+ this.msg = message;
return this;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java
index 8827d1d..751775f 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/ScmTestMock.java
@@ -193,11 +193,13 @@ public class ScmTestMock implements StorageContainerDatanodeProtocol {
rpcCount.incrementAndGet();
heartbeatCount.incrementAndGet();
if(heartbeat.hasCommandStatusReport()){
- cmdStatusList.addAll(heartbeat.getCommandStatusReport().getCmdStatusList());
+ cmdStatusList.addAll(heartbeat.getCommandStatusReport()
+ .getCmdStatusList());
commandStatusReport.incrementAndGet();
}
sleepIfNeeded();
- return SCMHeartbeatResponseProto.newBuilder().addAllCommands(scmCommandRequests)
+ return SCMHeartbeatResponseProto.newBuilder().addAllCommands(
+ scmCommandRequests)
.setDatanodeUUID(heartbeat.getDatanodeDetails().getUuid())
.build();
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java
index c9733f8..b658295 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java
@@ -19,17 +19,12 @@
package org.apache.hadoop.ozone.container.common.interfaces;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
import org.apache.hadoop.ozone.container.common.impl.HddsDispatcher;
import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueHandler;
-import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
-import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -37,8 +32,6 @@ import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import org.mockito.Mockito;
-import java.util.UUID;
-
/**
* Tests Handler interface.
*/
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/states/endpoint/TestHeartbeatEndpointTask.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/states/endpoint/TestHeartbeatEndpointTask.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/states/endpoint/TestHeartbeatEndpointTask.java
index 13de11f..69a6a33 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/states/endpoint/TestHeartbeatEndpointTask.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/states/endpoint/TestHeartbeatEndpointTask.java
@@ -22,8 +22,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto
- .StorageContainerDatanodeProtocolProtos.ContainerInfo;
-import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos.ContainerAction;
import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos.CommandStatusReportsProto;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestRoundRobinVolumeChoosingPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestRoundRobinVolumeChoosingPolicy.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestRoundRobinVolumeChoosingPolicy.java
index a45a639..80594d35 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestRoundRobinVolumeChoosingPolicy.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestRoundRobinVolumeChoosingPolicy.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.ozone.container.common.volume;
import org.apache.hadoop.fs.GetSpaceUsed;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
@@ -43,8 +42,8 @@ public class TestRoundRobinVolumeChoosingPolicy {
private List<HddsVolume> volumes;
private final String baseDir = MiniDFSCluster.getBaseDirectory();
- private final String volume1 = baseDir + "disk1";
- private final String volume2 = baseDir + "disk2";
+ private final String volume1 = baseDir + "disk1";
+ private final String volume2 = baseDir + "disk2";
private static final String DUMMY_IP_ADDR = "0.0.0.0";
@Before
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
index 173a8b2..fea126b 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
@@ -62,7 +62,8 @@ public class TestOzoneContainer {
conf = new OzoneConfiguration();
conf.set(ScmConfigKeys.HDDS_DATANODE_DIR_KEY, folder.getRoot()
.getAbsolutePath());
- conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, folder.newFolder().getAbsolutePath());
+ conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS,
+ folder.newFolder().getAbsolutePath());
}
@Test
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/events/EventWatcher.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/events/EventWatcher.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/events/EventWatcher.java
index 38386d4..e3fee63 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/events/EventWatcher.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/events/EventWatcher.java
@@ -180,9 +180,11 @@ public abstract class EventWatcher<TIMEOUT_PAYLOAD extends
}
- protected abstract void onTimeout(EventPublisher publisher, TIMEOUT_PAYLOAD payload);
+ protected abstract void onTimeout(
+ EventPublisher publisher, TIMEOUT_PAYLOAD payload);
- protected abstract void onFinished(EventPublisher publisher, TIMEOUT_PAYLOAD payload);
+ protected abstract void onFinished(
+ EventPublisher publisher, TIMEOUT_PAYLOAD payload);
public List<TIMEOUT_PAYLOAD> getTimeoutEvents(
Predicate<? super TIMEOUT_PAYLOAD> predicate) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/events/TestEventQueue.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/events/TestEventQueue.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/events/TestEventQueue.java
index 2bdf705..0c1200f 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/events/TestEventQueue.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/server/events/TestEventQueue.java
@@ -22,9 +22,6 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import java.util.Set;
-import java.util.stream.Collectors;
-
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java
index ca2a6a0..82d9a28 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java
@@ -161,11 +161,11 @@ public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean {
lock.lock();
try {
for (int i = 0; i < count; i++) {
- ContainerWithPipeline containerWithPipeline = null;
+ ContainerWithPipeline containerWithPipeline;
try {
// TODO: Fix this later when Ratis is made the Default.
- containerWithPipeline = containerManager.allocateContainer(type, factor,
- owner);
+ containerWithPipeline = containerManager.allocateContainer(
+ type, factor, owner);
if (containerWithPipeline == null) {
LOG.warn("Unable to allocate container.");
@@ -293,12 +293,12 @@ public class BlockManagerImpl implements BlockManager, BlockmanagerMXBean {
private String getChannelName(ReplicationType type) {
switch (type) {
- case RATIS:
- return "RA" + UUID.randomUUID().toString().substring(3);
- case STAND_ALONE:
- return "SA" + UUID.randomUUID().toString().substring(3);
- default:
- return "RA" + UUID.randomUUID().toString().substring(3);
+ case RATIS:
+ return "RA" + UUID.randomUUID().toString().substring(3);
+ case STAND_ALONE:
+ return "SA" + UUID.randomUUID().toString().substring(3);
+ default:
+ return "RA" + UUID.randomUUID().toString().substring(3);
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java
index df97c27..49af65c 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java
@@ -232,7 +232,8 @@ public class DeletedBlockLogImpl implements DeletedBlockLog {
lock.lock();
try {
Set<UUID> dnsWithCommittedTxn;
- for (DeleteBlockTransactionResult transactionResult : transactionResults) {
+ for (DeleteBlockTransactionResult transactionResult :
+ transactionResults) {
if (isTransactionFailed(transactionResult)) {
continue;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java
index 699fd37..de3fe26 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java
@@ -109,8 +109,8 @@ public class SCMBlockDeletingService extends BackgroundService {
public void handlePendingDeletes(PendingDeleteStatusList deletionStatusList) {
DatanodeDetails dnDetails = deletionStatusList.getDatanodeDetails();
- for (PendingDeleteStatusList.PendingDeleteStatus deletionStatus : deletionStatusList
- .getPendingDeleteStatuses()) {
+ for (PendingDeleteStatusList.PendingDeleteStatus deletionStatus :
+ deletionStatusList.getPendingDeleteStatuses()) {
LOG.info(
"Block deletion txnID mismatch in datanode {} for containerID {}."
+ " Datanode delete txnID: {}, SCM txnID: {}",
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/CloseContainerEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/CloseContainerEventHandler.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/CloseContainerEventHandler.java
index c723dfa..863907e 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/CloseContainerEventHandler.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/CloseContainerEventHandler.java
@@ -62,8 +62,8 @@ public class CloseContainerEventHandler implements EventHandler<ContainerID> {
containerManager.getContainerWithPipeline(containerID.getId());
info = containerWithPipeline.getContainerInfo();
if (info == null) {
- LOG.error("Failed to update the container state. Container with id : {} "
- + "does not exist", containerID.getId());
+ LOG.error("Failed to update the container state. Container with id : {}"
+ + " does not exist", containerID.getId());
return;
}
} catch (IOException e) {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java
index b000bfd..d506b0c 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerMapping.java
@@ -600,8 +600,8 @@ public class ContainerMapping implements Mapping {
.setReplicationType(knownState.getReplicationType())
.setReplicationFactor(knownState.getReplicationFactor());
- // TODO: If current state doesn't have this DN in list of DataNodes with replica
- // then add it in list of replicas.
+ // TODO: If current state doesn't have this DN in list of DataNodes with
+ // replica then add it in list of replicas.
// If used size is greater than allocated size, we will be updating
// allocated size with used size. This update is done as a fallback
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java
index 5eb8195..7afed42 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerStateManager.java
@@ -288,9 +288,10 @@ public class ContainerStateManager implements Closeable {
* @return ContainerWithPipeline
* @throws IOException on Failure.
*/
- public ContainerWithPipeline allocateContainer(PipelineSelector selector, HddsProtos
- .ReplicationType type, HddsProtos.ReplicationFactor replicationFactor,
- String owner) throws IOException {
+ public ContainerWithPipeline allocateContainer(PipelineSelector selector,
+ HddsProtos.ReplicationType type,
+ HddsProtos.ReplicationFactor replicationFactor, String owner)
+ throws IOException {
Pipeline pipeline = selector.getReplicationPipeline(type,
replicationFactor);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ReplicationManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ReplicationManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ReplicationManager.java
index 5f78722..4a980f7 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ReplicationManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ReplicationManager.java
@@ -182,7 +182,7 @@ public class ReplicationManager implements Runnable {
}
/**
- * Event for the ReplicationCommandWatcher to repeate the embedded request
+ * Event for the ReplicationCommandWatcher to repeat the embedded request.
* in case of timeout.
*/
public static class ReplicationRequestToRepeat
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java
index 6c6ce65..4d34cb7 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/states/ContainerStateMap.java
@@ -381,7 +381,7 @@ public class ContainerStateMap {
}
/**
- * Returns Open containers in the SCM by the Pipeline
+ * Returns Open containers in the SCM by the Pipeline.
*
* @param pipelineID - Pipeline id.
* @return NavigableSet<ContainerID>
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/Node2ContainerMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/Node2ContainerMap.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/Node2ContainerMap.java
index 8ed6d59..d4d475e 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/Node2ContainerMap.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/Node2ContainerMap.java
@@ -84,8 +84,8 @@ public class Node2ContainerMap {
* @throws SCMException - if we don't know about this datanode, for new DN
* use insertNewDatanode.
*/
- public void setContainersForDatanode(UUID datanodeID, Set<ContainerID> containers)
- throws SCMException {
+ public void setContainersForDatanode(UUID datanodeID,
+ Set<ContainerID> containers) throws SCMException {
Preconditions.checkNotNull(datanodeID);
Preconditions.checkNotNull(containers);
if (dn2ContainerMap
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/NodeNotFoundException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/NodeNotFoundException.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/NodeNotFoundException.java
index 52a352e..c44a08c 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/NodeNotFoundException.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/NodeNotFoundException.java
@@ -17,8 +17,6 @@
package org.apache.hadoop.hdds.scm.node.states;
-import java.io.IOException;
-
/**
* This exception represents that the node that is being accessed does not
* exist in NodeStateMap.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/ReportResult.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/ReportResult.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/ReportResult.java
index 2697629..9bb6cf1 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/ReportResult.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/states/ReportResult.java
@@ -94,7 +94,8 @@ public class ReportResult {
if (nullSafeMissingContainers == null) {
nullSafeMissingContainers = Collections.emptySet();
}
- return new ReportResult(status, nullSafeMissingContainers, nullSafeNewContainers);
+ return new ReportResult(status, nullSafeMissingContainers,
+ nullSafeNewContainers);
}
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/Node2PipelineMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/Node2PipelineMap.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/Node2PipelineMap.java
index b860082..4a7fa81 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/Node2PipelineMap.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/Node2PipelineMap.java
@@ -23,39 +23,31 @@ import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.hdds.scm.exceptions.SCMException;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
import java.util.Set;
import java.util.UUID;
-import java.util.Map;
-import java.util.HashSet;
-import java.util.Collections;
-
import java.util.concurrent.ConcurrentHashMap;
-import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes
- .DUPLICATE_DATANODE;
-
+import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes.DUPLICATE_DATANODE;
/**
- * This data structure maintains the list of pipelines which the given datanode
- * is a part of.
- * This information will be added whenever a new pipeline allocation happens.
+ * This data structure maintains the list of pipelines which the given datanode is a part of. This
+ * information will be added whenever a new pipeline allocation happens.
*
- * TODO: this information needs to be regenerated from pipeline reports on
- * SCM restart
+ * <p>TODO: this information needs to be regenerated from pipeline reports on SCM restart
*/
public class Node2PipelineMap {
private final Map<UUID, Set<Pipeline>> dn2PipelineMap;
- /**
- * Constructs a Node2PipelineMap Object.
- */
+ /** Constructs a Node2PipelineMap Object. */
public Node2PipelineMap() {
dn2PipelineMap = new ConcurrentHashMap<>();
}
/**
- * Returns true if this a datanode that is already tracked by
- * Node2PipelineMap.
+ * Returns true if this a datanode that is already tracked by Node2PipelineMap.
*
* @param datanodeID - UUID of the Datanode.
* @return True if this is tracked, false if this map does not know about it.
@@ -71,18 +63,17 @@ public class Node2PipelineMap {
* @param datanodeID -- Datanode UUID
* @param pipelines - set of pipelines.
*/
- private void insertNewDatanode(UUID datanodeID, Set<Pipeline> pipelines)
- throws SCMException {
+ private void insertNewDatanode(UUID datanodeID, Set<Pipeline> pipelines) throws SCMException {
Preconditions.checkNotNull(pipelines);
Preconditions.checkNotNull(datanodeID);
- if(dn2PipelineMap.putIfAbsent(datanodeID, pipelines) != null) {
- throw new SCMException("Node already exists in the map",
- DUPLICATE_DATANODE);
+ if (dn2PipelineMap.putIfAbsent(datanodeID, pipelines) != null) {
+ throw new SCMException("Node already exists in the map", DUPLICATE_DATANODE);
}
}
/**
* Removes datanode Entry from the map.
+ *
* @param datanodeID - Datanode ID.
*/
public synchronized void removeDatanode(UUID datanodeID) {
@@ -98,20 +89,19 @@ public class Node2PipelineMap {
*/
public Set<Pipeline> getPipelines(UUID datanode) {
Preconditions.checkNotNull(datanode);
- return dn2PipelineMap.computeIfPresent(datanode, (k, v) ->
- Collections.unmodifiableSet(v));
+ return dn2PipelineMap.computeIfPresent(datanode, (k, v) -> Collections.unmodifiableSet(v));
}
/**
* Adds a pipeline entry to a given dataNode in the map.
+ *
* @param pipeline Pipeline to be added
*/
public synchronized void addPipeline(Pipeline pipeline) {
for (DatanodeDetails details : pipeline.getDatanodes().values()) {
UUID dnId = details.getUuid();
dn2PipelineMap
- .computeIfAbsent(dnId,
- k -> Collections.synchronizedSet(new HashSet<>()))
+ .computeIfAbsent(dnId, k -> Collections.synchronizedSet(new HashSet<>()))
.add(pipeline);
}
}
@@ -119,8 +109,12 @@ public class Node2PipelineMap {
public synchronized void removePipeline(Pipeline pipeline) {
for (DatanodeDetails details : pipeline.getDatanodes().values()) {
UUID dnId = details.getUuid();
- dn2PipelineMap.computeIfPresent(dnId,
- (k, v) -> {v.remove(pipeline); return v;});
+ dn2PipelineMap.computeIfPresent(
+ dnId,
+ (k, v) -> {
+ v.remove(pipeline);
+ return v;
+ });
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java
index 7d91ee4..5b1a7f7 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineManager.java
@@ -111,7 +111,7 @@ public abstract class PipelineManager {
ReplicationFactor replicationFactor);
/**
- * Initialize the pipeline
+ * Initialize the pipeline.
* TODO: move the initialization to Ozone Client later
*/
public abstract void initializePipeline(Pipeline pipeline) throws IOException;
@@ -176,7 +176,7 @@ public abstract class PipelineManager {
}
/**
- * Remove the pipeline from active allocation
+ * Remove the pipeline from active allocation.
* @param pipeline pipeline to be finalized
*/
public synchronized void finalizePipeline(Pipeline pipeline) {
@@ -193,7 +193,7 @@ public abstract class PipelineManager {
}
/**
- * list members in the pipeline .
+ * list members in the pipeline.
* @return the datanode
*/
public abstract List<DatanodeDetails> getMembers(PipelineID pipelineID)
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java
index 5343bce..b02beb3 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipelines/PipelineSelector.java
@@ -126,7 +126,7 @@ public class PipelineSelector {
}
/**
- * Event and State Transition Mapping:
+ * Event and State Transition Mapping.
*
* State: ALLOCATED ---------------> CREATING
* Event: CREATE
@@ -293,7 +293,7 @@ public class PipelineSelector {
pipeline = manager.getPipeline(replicationFactor, replicationType);
} else {
// if a new pipeline is created, initialize its state machine
- updatePipelineState(pipeline,HddsProtos.LifeCycleEvent.CREATE);
+ updatePipelineState(pipeline, HddsProtos.LifeCycleEvent.CREATE);
//TODO: move the initialization of pipeline to Ozone Client
manager.initializePipeline(pipeline);
@@ -334,7 +334,8 @@ public class PipelineSelector {
/**
* Close a given pipeline.
*/
- public void closePipelineIfNoOpenContainers(Pipeline pipeline) throws IOException {
+ public void closePipelineIfNoOpenContainers(Pipeline pipeline)
+ throws IOException {
if (pipeline.getLifeCycleState() != LifeCycleState.CLOSING) {
return;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
index aefcf1b..77e495d 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
@@ -165,7 +165,8 @@ public class SCMClientProtocolServer implements
}
@Override
- public ContainerWithPipeline getContainerWithPipeline(long containerID) throws IOException {
+ public ContainerWithPipeline getContainerWithPipeline(long containerID)
+ throws IOException {
String remoteUser = getRpcRemoteUsername();
getScm().checkAdminAccess(remoteUser);
return scm.getScmContainerManager()
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java
index c466570..d617680 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/TestUtils.java
@@ -74,7 +74,7 @@ public final class TestUtils {
+ "." + random.nextInt(256)
+ "." + random.nextInt(256)
+ "." + random.nextInt(256);
- return createDatanodeDetails(uuid.toString(), "localhost", ipAddress);
+ return createDatanodeDetails(uuid.toString(), "localhost", ipAddress);
}
/**
@@ -259,12 +259,12 @@ public final class TestUtils {
StorageTypeProto storageTypeProto =
type == null ? StorageTypeProto.DISK : type;
srb.setStorageType(storageTypeProto);
- return srb.build();
+ return srb.build();
}
/**
- * Generates random container reports
+ * Generates random container reports.
*
* @return ContainerReportsProto
*/
@@ -281,7 +281,7 @@ public final class TestUtils {
*/
public static ContainerReportsProto getRandomContainerReports(
int numberOfContainers) {
- List<ContainerInfo> containerInfos = new ArrayList<>();
+ List<ContainerInfo> containerInfos = new ArrayList<>();
for (int i = 0; i < numberOfContainers; i++) {
containerInfos.add(getRandomContainerInfo(i));
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
index 2beb4e7..68c5813 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
@@ -39,7 +39,6 @@ import org.junit.rules.ExpectedException;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
-import java.util.Collections;
import static org.apache.hadoop.ozone.OzoneConsts.GB;
import static org.apache.hadoop.ozone.OzoneConsts.MB;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
index 1d06ea4..9f0e336 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
@@ -102,8 +102,8 @@ public class TestDeletedBlockLog {
ContainerInfo containerInfo =
new ContainerInfo.Builder().setContainerID(1).build();
Pipeline pipeline =
- new Pipeline(null, LifeCycleState.CLOSED, ReplicationType.RATIS,
- ReplicationFactor.THREE, null);
+ new Pipeline(null, LifeCycleState.CLOSED,
+ ReplicationType.RATIS, ReplicationFactor.THREE, null);
pipeline.addMember(dnList.get(0));
pipeline.addMember(dnList.get(1));
pipeline.addMember(dnList.get(2));
@@ -379,7 +379,8 @@ public class TestDeletedBlockLog {
Assert.assertTrue(transactions.isFull());
}
- private void mockContainerInfo(long containerID, DatanodeDetails dd) throws IOException {
+ private void mockContainerInfo(long containerID, DatanodeDetails dd)
+ throws IOException {
Pipeline pipeline =
new Pipeline("fake", LifeCycleState.OPEN,
ReplicationType.STAND_ALONE, ReplicationFactor.ONE,
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/command/TestCommandStatusReportHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/command/TestCommandStatusReportHandler.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/command/TestCommandStatusReportHandler.java
index eca5b87..65a2e29 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/command/TestCommandStatusReportHandler.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/command/TestCommandStatusReportHandler.java
@@ -48,12 +48,11 @@ import static org.junit.Assert.assertFalse;
public class TestCommandStatusReportHandler implements EventPublisher {
- private static Logger LOG = LoggerFactory
+ private static final Logger LOG = LoggerFactory
.getLogger(TestCommandStatusReportHandler.class);
private CommandStatusReportHandler cmdStatusReportHandler;
private String storagePath = GenericTestUtils.getRandomizedTempPath()
.concat("/" + UUID.randomUUID().toString());
- ;
@Before
public void setup() {
@@ -69,10 +68,9 @@ public class TestCommandStatusReportHandler implements EventPublisher {
.emptyList());
cmdStatusReportHandler.onMessage(report, this);
assertFalse(logCapturer.getOutput().contains("DeleteBlockCommandStatus"));
- assertFalse(logCapturer.getOutput().contains
- ("CloseContainerCommandStatus"));
- assertFalse(logCapturer.getOutput().contains
- ("ReplicateCommandStatus"));
+ assertFalse(logCapturer.getOutput().contains(
+ "CloseContainerCommandStatus"));
+ assertFalse(logCapturer.getOutput().contains("ReplicateCommandStatus"));
report = this.getStatusReport(this.getCommandStatusList());
@@ -93,13 +91,13 @@ public class TestCommandStatusReportHandler implements EventPublisher {
}
- private CommandStatusReportFromDatanode getStatusReport(List<CommandStatus>
- reports) {
- CommandStatusReportsProto report = TestUtils.createCommandStatusReport
- (reports);
+ private CommandStatusReportFromDatanode getStatusReport(
+ List<CommandStatus> reports) {
+ CommandStatusReportsProto report = TestUtils.createCommandStatusReport(
+ reports);
DatanodeDetails dn = TestUtils.randomDatanodeDetails();
- return new SCMDatanodeHeartbeatDispatcher.CommandStatusReportFromDatanode
- (dn, report);
+ return new SCMDatanodeHeartbeatDispatcher.CommandStatusReportFromDatanode(
+ dn, report);
}
@Override
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java
index 4790c82..38050c9 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java
@@ -37,7 +37,6 @@ import org.junit.Test;
import java.io.File;
import java.io.IOException;
-import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent.CREATE;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleEvent.CREATED;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_DEFAULT;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java
index b0b39f1..c5686f5 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerMapping.java
@@ -178,8 +178,8 @@ public class TestContainerMapping {
mapping
.updateContainerState(contInfo.getContainerID(), LifeCycleEvent.CLOSE);
ContainerInfo finalContInfo = contInfo;
- LambdaTestUtils.intercept(SCMException.class,"No entry exist for "
- + "containerId:" , () -> mapping.getContainerWithPipeline(
+ LambdaTestUtils.intercept(SCMException.class, "No entry exist for "
+ + "containerId:", () -> mapping.getContainerWithPipeline(
finalContInfo.getContainerID()));
mapping.getStateManager().getContainerStateMap()
@@ -376,7 +376,8 @@ public class TestContainerMapping {
@Test
public void testFlushAllContainers() throws IOException {
ContainerInfo info = createContainer();
- List<ContainerInfo> containers = mapping.getStateManager().getAllContainers();
+ List<ContainerInfo> containers = mapping.getStateManager()
+ .getAllContainers();
Assert.assertTrue(containers.size() > 0);
mapping.flushContainerInfo();
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java
index 443b4b2..66f0966 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java
@@ -86,7 +86,7 @@ public class TestContainerReportHandler implements EventPublisher {
.setContainerID((Long) invocation.getArguments()[0])
.setState(LifeCycleState.CLOSED)
.build()
- );
+ );
ContainerStateManager containerStateManager =
new ContainerStateManager(conf, mapping);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c5629d54/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java
index fea1e4b..764daff 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/placement/algorithms/TestSCMContainerPlacementCapacity.java
@@ -52,13 +52,13 @@ public class TestSCMContainerPlacementCapacity {
.thenReturn(new ArrayList<>(datanodes));
when(mockNodeManager.getNodeStat(anyObject()))
- .thenReturn(new SCMNodeMetric(100l, 0L, 100L));
+ .thenReturn(new SCMNodeMetric(100L, 0L, 100L));
when(mockNodeManager.getNodeStat(datanodes.get(2)))
- .thenReturn(new SCMNodeMetric(100l, 90L, 10L));
+ .thenReturn(new SCMNodeMetric(100L, 90L, 10L));
when(mockNodeManager.getNodeStat(datanodes.get(3)))
- .thenReturn(new SCMNodeMetric(100l, 80L, 20L));
+ .thenReturn(new SCMNodeMetric(100L, 80L, 20L));
when(mockNodeManager.getNodeStat(datanodes.get(4)))
- .thenReturn(new SCMNodeMetric(100l, 70L, 30L));
+ .thenReturn(new SCMNodeMetric(100L, 70L, 30L));
SCMContainerPlacementCapacity scmContainerPlacementRandom =
new SCMContainerPlacementCapacity(mockNodeManager, conf);
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org