You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sh...@apache.org on 2018/08/26 00:44:43 UTC
[31/50] [abbrv] hadoop git commit: HDDS-265. Move
numPendingDeletionBlocks and deleteTransactionId from ContainerData to
KeyValueContainerData. Contributed by LiXin Ge.
HDDS-265. Move numPendingDeletionBlocks and deleteTransactionId from ContainerData to KeyValueContainerData. Contributed by LiXin Ge.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5aa15cfa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5aa15cfa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5aa15cfa
Branch: refs/heads/HDFS-12943
Commit: 5aa15cfaffbf294b5025989c20d905b01da52c2b
Parents: 8184739
Author: Hanisha Koneru <ha...@apache.org>
Authored: Wed Aug 22 10:04:15 2018 -0700
Committer: Hanisha Koneru <ha...@apache.org>
Committed: Wed Aug 22 10:04:15 2018 -0700
----------------------------------------------------------------------
.../common/helpers/ContainerReport.java | 14 ---
.../common/helpers/KeyValueContainerReport.java | 117 +++++++++++++++++++
.../container/common/impl/ContainerData.java | 53 ---------
.../container/common/impl/ContainerSet.java | 50 +-------
.../RandomContainerDeletionChoosingPolicy.java | 3 +-
...NOrderedContainerDeletionChoosingPolicy.java | 24 ++--
.../container/common/interfaces/Container.java | 6 +
.../ContainerDeletionChoosingPolicy.java | 13 +++
.../container/keyvalue/KeyValueContainer.java | 46 ++++++++
.../keyvalue/KeyValueContainerData.java | 54 +++++++++
.../common/TestKeyValueContainerData.java | 4 +-
.../common/TestBlockDeletingService.java | 8 +-
.../commandhandler/TestBlockDeletion.java | 9 +-
13 files changed, 268 insertions(+), 133 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java
index b242754..a4c1f2f 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerReport.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.ozone.container.common.helpers;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerInfo;
-import static java.lang.Math.max;
/**
* Container Report iterates the closed containers and sends a container report
@@ -37,7 +36,6 @@ public class ContainerReport {
private long readBytes;
private long writeBytes;
private long containerID;
- private long deleteTransactionId;
public long getContainerID() {
return containerID;
@@ -47,9 +45,6 @@ public class ContainerReport {
this.containerID = containerID;
}
-
-
-
/**
* Constructs the ContainerReport.
*
@@ -66,7 +61,6 @@ public class ContainerReport {
this.readBytes = 0L;
this.writeCount = 0L;
this.writeBytes = 0L;
- this.deleteTransactionId = 0;
}
/**
@@ -100,9 +94,6 @@ public class ContainerReport {
if (info.hasWriteBytes()) {
report.setWriteBytes(info.getWriteBytes());
}
- if (info.hasDeleteTransactionId()) {
- report.updateDeleteTransactionId(info.getDeleteTransactionId());
- }
report.setContainerID(info.getContainerID());
return report;
@@ -193,10 +184,6 @@ public class ContainerReport {
this.bytesUsed = bytesUsed;
}
- public void updateDeleteTransactionId(long transactionId) {
- this.deleteTransactionId = max(transactionId, deleteTransactionId);
- }
-
/**
* Gets a containerInfo protobuf message from ContainerReports.
*
@@ -213,7 +200,6 @@ public class ContainerReport {
.setWriteBytes(this.getWriteBytes())
.setFinalhash(this.getFinalhash())
.setContainerID(this.getContainerID())
- .setDeleteTransactionId(this.deleteTransactionId)
.build();
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyValueContainerReport.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyValueContainerReport.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyValueContainerReport.java
new file mode 100644
index 0000000..b03487b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/KeyValueContainerReport.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerInfo;
+
+import static java.lang.Math.max;
+
+/**
+ * KeyValueContainer Report iterates the closed containers and sends a
+ * container report to SCM.
+ */
+public class KeyValueContainerReport extends ContainerReport{
+ private long deleteTransactionId;
+
+ /**
+ * Constructs the KeyValueContainerReport.
+ *
+ * @param containerID - Container ID.
+ * @param finalhash - Final Hash.
+ */
+ public KeyValueContainerReport(long containerID, String finalhash) {
+ super(containerID, finalhash);
+ this.deleteTransactionId = 0;
+ }
+
+ /**
+ * Sets the deleteTransactionId if it is greater than existing.
+ * @param transactionId - deleteTransactionId
+ */
+ public void updateDeleteTransactionId(long transactionId) {
+ this.deleteTransactionId = max(transactionId, deleteTransactionId);
+ }
+
+ /**
+ * Gets the deleteTransactionId.
+ * @return - deleteTransactionId.
+ */
+ public long getDeleteTransactionId() {
+ return this.deleteTransactionId;
+ }
+
+ /**
+ * Gets a containerReport from protobuf class.
+ *
+ * @param info - ContainerInfo.
+ * @return - ContainerReport.
+ */
+ public static KeyValueContainerReport getFromProtoBuf(ContainerInfo info) {
+ Preconditions.checkNotNull(info);
+ KeyValueContainerReport report = new KeyValueContainerReport(
+ info.getContainerID(), info.getFinalhash());
+ if (info.hasSize()) {
+ report.setSize(info.getSize());
+ }
+ if (info.hasKeyCount()) {
+ report.setKeyCount(info.getKeyCount());
+ }
+ if (info.hasUsed()) {
+ report.setBytesUsed(info.getUsed());
+ }
+ if (info.hasReadCount()) {
+ report.setReadCount(info.getReadCount());
+ }
+ if (info.hasReadBytes()) {
+ report.setReadBytes(info.getReadBytes());
+ }
+ if (info.hasWriteCount()) {
+ report.setWriteCount(info.getWriteCount());
+ }
+ if (info.hasWriteBytes()) {
+ report.setWriteBytes(info.getWriteBytes());
+ }
+ if (info.hasDeleteTransactionId()) {
+ report.updateDeleteTransactionId(info.getDeleteTransactionId());
+ }
+ report.setContainerID(info.getContainerID());
+ return report;
+ }
+
+ /**
+ * Gets a containerInfo protobuf message from ContainerReports.
+ *
+ * @return ContainerInfo
+ */
+ @Override
+ public ContainerInfo getProtoBufMessage() {
+ return ContainerInfo.newBuilder()
+ .setKeyCount(this.getKeyCount())
+ .setSize(this.getSize())
+ .setUsed(this.getBytesUsed())
+ .setReadCount(this.getReadCount())
+ .setReadBytes(this.getReadBytes())
+ .setWriteCount(this.getWriteCount())
+ .setWriteBytes(this.getWriteBytes())
+ .setFinalhash(this.getFinalhash())
+ .setContainerID(this.getContainerID())
+ .setDeleteTransactionId(this.getDeleteTransactionId())
+ .build();
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
index 26954a7..47894dc 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
@@ -33,11 +33,9 @@ import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;
-import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.yaml.snakeyaml.Yaml;
-import static java.lang.Math.max;
import static org.apache.hadoop.ozone.OzoneConsts.CHECKSUM;
import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_ID;
import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_TYPE;
@@ -81,8 +79,6 @@ public abstract class ContainerData {
private HddsVolume volume;
- private long deleteTransactionId;
-
private String checksum;
public static final Charset CHARSET_ENCODING = Charset.forName("UTF-8");
private static final String DUMMY_CHECKSUM = new String(new byte[64],
@@ -99,12 +95,6 @@ public abstract class ContainerData {
MAX_SIZE_GB,
CHECKSUM));
-
- /**
- * Number of pending deletion blocks in container.
- */
- private final AtomicInteger numPendingDeletionBlocks;
-
/**
* Creates a ContainerData Object, which holds metadata of the container.
* @param type - ContainerType
@@ -139,8 +129,6 @@ public abstract class ContainerData {
this.bytesUsed = new AtomicLong(0L);
this.keyCount = new AtomicLong(0L);
this.maxSizeGB = size;
- this.numPendingDeletionBlocks = new AtomicInteger(0);
- this.deleteTransactionId = 0;
setChecksumTo0ByteArray();
}
@@ -403,31 +391,6 @@ public abstract class ContainerData {
this.keyCount.set(count);
}
- /**
- * Increase the count of pending deletion blocks.
- *
- * @param numBlocks increment number
- */
- public void incrPendingDeletionBlocks(int numBlocks) {
- this.numPendingDeletionBlocks.addAndGet(numBlocks);
- }
-
- /**
- * Decrease the count of pending deletion blocks.
- *
- * @param numBlocks decrement number
- */
- public void decrPendingDeletionBlocks(int numBlocks) {
- this.numPendingDeletionBlocks.addAndGet(-1 * numBlocks);
- }
-
- /**
- * Get the number of pending deletion blocks.
- */
- public int getNumPendingDeletionBlocks() {
- return this.numPendingDeletionBlocks.get();
- }
-
public void setChecksumTo0ByteArray() {
this.checksum = DUMMY_CHECKSUM;
}
@@ -469,20 +432,4 @@ public abstract class ContainerData {
* @return Protocol Buffer Message
*/
public abstract ContainerProtos.ContainerData getProtoBufMessage();
-
- /**
- * Sets deleteTransactionId to latest delete transactionId for the container.
- *
- * @param transactionId latest transactionId of the container.
- */
- public void updateDeleteTransactionId(long transactionId) {
- deleteTransactionId = max(transactionId, deleteTransactionId);
- }
-
- /**
- * Return the latest deleteTransactionId of the container.
- */
- public long getDeleteTransactionId() {
- return deleteTransactionId;
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java
index 3da09f2..f92ab52 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerSet.java
@@ -22,9 +22,6 @@ import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.hdds.protocol.proto
- .StorageContainerDatanodeProtocolProtos.ContainerInfo;
import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos.ContainerReportsProto;
import org.apache.hadoop.hdds.scm.container.common.helpers
@@ -43,8 +40,6 @@ import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.stream.Collectors;
-import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
- .Result.INVALID_CONTAINER_STATE;
/**
* Class that manages Containers created on the datanode.
@@ -204,58 +199,19 @@ public class ContainerSet {
ContainerReportsProto.Builder crBuilder =
ContainerReportsProto.newBuilder();
-
for (Container container: containers) {
- long containerId = container.getContainerData().getContainerID();
- ContainerInfo.Builder ciBuilder = ContainerInfo.newBuilder();
- ContainerData containerData = container.getContainerData();
- ciBuilder.setContainerID(containerId)
- .setReadCount(containerData.getReadCount())
- .setWriteCount(containerData.getWriteCount())
- .setReadBytes(containerData.getReadBytes())
- .setWriteBytes(containerData.getWriteBytes())
- .setUsed(containerData.getBytesUsed())
- .setState(getState(containerData))
- .setDeleteTransactionId(containerData.getDeleteTransactionId());
-
- crBuilder.addReports(ciBuilder.build());
+ crBuilder.addReports(container.getContainerReport());
}
return crBuilder.build();
}
- /**
- * Returns LifeCycle State of the container.
- * @param containerData - ContainerData
- * @return LifeCycle State of the container
- * @throws StorageContainerException
- */
- private HddsProtos.LifeCycleState getState(ContainerData containerData)
- throws StorageContainerException {
- HddsProtos.LifeCycleState state;
- switch (containerData.getState()) {
- case OPEN:
- state = HddsProtos.LifeCycleState.OPEN;
- break;
- case CLOSING:
- state = HddsProtos.LifeCycleState.CLOSING;
- break;
- case CLOSED:
- state = HddsProtos.LifeCycleState.CLOSED;
- break;
- default:
- throw new StorageContainerException("Invalid Container state found: " +
- containerData.getContainerID(), INVALID_CONTAINER_STATE);
- }
- return state;
- }
-
public List<ContainerData> chooseContainerForBlockDeletion(int count,
ContainerDeletionChoosingPolicy deletionPolicy)
throws StorageContainerException {
Map<Long, ContainerData> containerDataMap = containerMap.entrySet().stream()
- .filter(e -> e.getValue().getContainerType()
- == ContainerProtos.ContainerType.KeyValueContainer)
+ .filter(e -> deletionPolicy.isValidContainerType(
+ e.getValue().getContainerType()))
.collect(Collectors.toMap(Map.Entry::getKey,
e -> e.getValue().getContainerData()));
return deletionPolicy
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java
index 83d746b..5c6c319 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/RandomContainerDeletionChoosingPolicy.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hdds.scm.container.common.helpers
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.ozone.container.common.interfaces
.ContainerDeletionChoosingPolicy;
+import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,7 +59,7 @@ public class RandomContainerDeletionChoosingPolicy
LOG.debug("Select container {} for block deletion, "
+ "pending deletion blocks num: {}.",
entry.getContainerID(),
- entry.getNumPendingDeletionBlocks());
+ ((KeyValueContainerData)entry).getNumPendingDeletionBlocks());
} else {
break;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java
index 68074fc..b17680c 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/TopNOrderedContainerDeletionChoosingPolicy.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hdds.scm.container.common.helpers
.StorageContainerException;
import org.apache.hadoop.ozone.container.common.interfaces
.ContainerDeletionChoosingPolicy;
+import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -41,14 +42,11 @@ public class TopNOrderedContainerDeletionChoosingPolicy
LoggerFactory.getLogger(TopNOrderedContainerDeletionChoosingPolicy.class);
/** customized comparator used to compare differentiate container data. **/
- private static final Comparator<ContainerData> CONTAINER_DATA_COMPARATOR
- = new Comparator<ContainerData>() {
- @Override
- public int compare(ContainerData c1, ContainerData c2) {
- return Integer.compare(c2.getNumPendingDeletionBlocks(),
- c1.getNumPendingDeletionBlocks());
- }
- };
+ private static final Comparator<KeyValueContainerData>
+ KEY_VALUE_CONTAINER_DATA_COMPARATOR = (KeyValueContainerData c1,
+ KeyValueContainerData c2) ->
+ Integer.compare(c2.getNumPendingDeletionBlocks(),
+ c1.getNumPendingDeletionBlocks());
@Override
public List<ContainerData> chooseContainerForBlockDeletion(int count,
@@ -58,13 +56,15 @@ public class TopNOrderedContainerDeletionChoosingPolicy
"Internal assertion: candidate containers cannot be null");
List<ContainerData> result = new LinkedList<>();
- List<ContainerData> orderedList = new LinkedList<>();
- orderedList.addAll(candidateContainers.values());
- Collections.sort(orderedList, CONTAINER_DATA_COMPARATOR);
+ List<KeyValueContainerData> orderedList = new LinkedList<>();
+ for (ContainerData entry : candidateContainers.values()) {
+ orderedList.add((KeyValueContainerData)entry);
+ }
+ Collections.sort(orderedList, KEY_VALUE_CONTAINER_DATA_COMPARATOR);
// get top N list ordered by pending deletion blocks' number
int currentCount = 0;
- for (ContainerData entry : orderedList) {
+ for (KeyValueContainerData entry : orderedList) {
if (currentCount < count) {
if (entry.getNumPendingDeletionBlocks() > 0) {
result.add(entry);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/Container.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/Container.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/Container.java
index a7077d9..7f706b5 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/Container.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/Container.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.ozone.container.common.interfaces;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerLifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers.
StorageContainerException;
@@ -111,4 +112,9 @@ public interface Container extends RwLock {
*/
BlockIterator blockIterator() throws IOException;
+ /**
+ * Returns containerReport for the container.
+ */
+ StorageContainerDatanodeProtocolProtos.ContainerInfo getContainerReport()
+ throws StorageContainerException;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java
index dce86e9..84c4f90 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerDeletionChoosingPolicy.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.ozone.container.common.interfaces;
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers
.StorageContainerException;
import org.apache.hadoop.ozone.container.common.impl.ContainerData;
@@ -42,4 +43,16 @@ public interface ContainerDeletionChoosingPolicy {
List<ContainerData> chooseContainerForBlockDeletion(int count,
Map<Long, ContainerData> candidateContainers)
throws StorageContainerException;
+
+ /**
+ * Determine if the container has suitable type for this policy.
+ * @param type type of the container
+ * @return whether the container type suitable for this policy.
+ */
+ default boolean isValidContainerType(ContainerProtos.ContainerType type) {
+ if (type == ContainerProtos.ContainerType.KeyValueContainer) {
+ return true;
+ }
+ return false;
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java
index c96f997..0ea748a 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java
@@ -28,6 +28,8 @@ import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerLifeCycleState;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerType;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos;
import org.apache.hadoop.hdds.scm.container.common.helpers
.StorageContainerException;
import org.apache.hadoop.io.nativeio.NativeIO;
@@ -406,6 +408,50 @@ public class KeyValueContainer implements Container {
}
/**
+ * Returns KeyValueContainerReport for the KeyValueContainer.
+ */
+ @Override
+ public StorageContainerDatanodeProtocolProtos.ContainerInfo
+ getContainerReport() throws StorageContainerException{
+ StorageContainerDatanodeProtocolProtos.ContainerInfo.Builder ciBuilder =
+ StorageContainerDatanodeProtocolProtos.ContainerInfo.newBuilder();
+ ciBuilder.setContainerID(containerData.getContainerID())
+ .setReadCount(containerData.getReadCount())
+ .setWriteCount(containerData.getWriteCount())
+ .setReadBytes(containerData.getReadBytes())
+ .setWriteBytes(containerData.getWriteBytes())
+ .setUsed(containerData.getBytesUsed())
+ .setState(getHddsState())
+ .setDeleteTransactionId(containerData.getDeleteTransactionId());
+ return ciBuilder.build();
+ }
+
+ /**
+ * Returns LifeCycle State of the container.
+ * @return LifeCycle State of the container in HddsProtos format
+ * @throws StorageContainerException
+ */
+ private HddsProtos.LifeCycleState getHddsState()
+ throws StorageContainerException {
+ HddsProtos.LifeCycleState state;
+ switch (containerData.getState()) {
+ case OPEN:
+ state = HddsProtos.LifeCycleState.OPEN;
+ break;
+ case CLOSING:
+ state = HddsProtos.LifeCycleState.CLOSING;
+ break;
+ case CLOSED:
+ state = HddsProtos.LifeCycleState.CLOSED;
+ break;
+ default:
+ throw new StorageContainerException("Invalid Container state found: " +
+ containerData.getContainerID(), INVALID_CONTAINER_STATE);
+ }
+ return state;
+ }
+
+ /**
* Returns container DB file.
* @return
*/
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java
index 0705cf4..1d37437 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java
@@ -32,7 +32,9 @@ import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import static java.lang.Math.max;
import static org.apache.hadoop.ozone.OzoneConsts.CHUNKS_PATH;
import static org.apache.hadoop.ozone.OzoneConsts.CONTAINER_DB_TYPE;
import static org.apache.hadoop.ozone.OzoneConsts.METADATA_PATH;
@@ -61,6 +63,13 @@ public class KeyValueContainerData extends ContainerData {
private File dbFile = null;
+ /**
+ * Number of pending deletion blocks in KeyValueContainer.
+ */
+ private final AtomicInteger numPendingDeletionBlocks;
+
+ private long deleteTransactionId;
+
static {
// Initialize YAML fields
KV_YAML_FIELDS = Lists.newArrayList();
@@ -77,6 +86,8 @@ public class KeyValueContainerData extends ContainerData {
*/
public KeyValueContainerData(long id, int size) {
super(ContainerProtos.ContainerType.KeyValueContainer, id, size);
+ this.numPendingDeletionBlocks = new AtomicInteger(0);
+ this.deleteTransactionId = 0;
}
/**
@@ -88,6 +99,8 @@ public class KeyValueContainerData extends ContainerData {
public KeyValueContainerData(long id, int layOutVersion, int size) {
super(ContainerProtos.ContainerType.KeyValueContainer, id, layOutVersion,
size);
+ this.numPendingDeletionBlocks = new AtomicInteger(0);
+ this.deleteTransactionId = 0;
}
@@ -169,6 +182,47 @@ public class KeyValueContainerData extends ContainerData {
}
/**
+ * Increase the count of pending deletion blocks.
+ *
+ * @param numBlocks increment number
+ */
+ public void incrPendingDeletionBlocks(int numBlocks) {
+ this.numPendingDeletionBlocks.addAndGet(numBlocks);
+ }
+
+ /**
+ * Decrease the count of pending deletion blocks.
+ *
+ * @param numBlocks decrement number
+ */
+ public void decrPendingDeletionBlocks(int numBlocks) {
+ this.numPendingDeletionBlocks.addAndGet(-1 * numBlocks);
+ }
+
+ /**
+ * Get the number of pending deletion blocks.
+ */
+ public int getNumPendingDeletionBlocks() {
+ return this.numPendingDeletionBlocks.get();
+ }
+
+ /**
+ * Sets deleteTransactionId to latest delete transactionId for the container.
+ *
+ * @param transactionId latest transactionId of the container.
+ */
+ public void updateDeleteTransactionId(long transactionId) {
+ deleteTransactionId = max(transactionId, deleteTransactionId);
+ }
+
+ /**
+ * Return the latest deleteTransactionId of the container.
+ */
+ public long getDeleteTransactionId() {
+ return deleteTransactionId;
+ }
+
+ /**
* Returns a ProtoBuf Message from ContainerData.
*
* @return Protocol Buffer Message
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java
index 42db66d..12ce163 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java
@@ -57,6 +57,7 @@ public class TestKeyValueContainerData {
assertEquals(val.get(), kvData.getReadCount());
assertEquals(val.get(), kvData.getWriteCount());
assertEquals(val.get(), kvData.getKeyCount());
+ assertEquals(val.get(), kvData.getNumPendingDeletionBlocks());
assertEquals(MAXSIZE, kvData.getMaxSizeGB());
kvData.setState(state);
@@ -68,6 +69,7 @@ public class TestKeyValueContainerData {
kvData.incrReadCount();
kvData.incrWriteCount();
kvData.incrKeyCount();
+ kvData.incrPendingDeletionBlocks(1);
assertEquals(state, kvData.getState());
assertEquals(containerDBType, kvData.getContainerDBType());
@@ -79,7 +81,7 @@ public class TestKeyValueContainerData {
assertEquals(1, kvData.getReadCount());
assertEquals(1, kvData.getWriteCount());
assertEquals(1, kvData.getKeyCount());
-
+ assertEquals(1, kvData.getNumPendingDeletionBlocks());
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/TestBlockDeletingService.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/TestBlockDeletingService.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/TestBlockDeletingService.java
index 4ca4124..dcf4022 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/TestBlockDeletingService.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/TestBlockDeletingService.java
@@ -203,8 +203,12 @@ public class TestBlockDeletingService {
MetadataStore meta = KeyUtils.getDB(
(KeyValueContainerData) containerData.get(0), conf);
Map<Long, Container> containerMap = containerSet.getContainerMap();
- long transactionId = containerMap.get(containerData.get(0).getContainerID())
- .getContainerData().getDeleteTransactionId();
+ // NOTE: this test assumes that all the containers are KeyValueContainers and
+ // have DeleteTransactionId in KeyValueContainerData. If other
+ // types are going to be added, this test should be checked.
+ long transactionId = ((KeyValueContainerData)containerMap
+ .get(containerData.get(0).getContainerID()).getContainerData())
+ .getDeleteTransactionId();
// Number of deleted blocks in container should be equal to 0 before
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aa15cfa/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestBlockDeletion.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestBlockDeletion.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestBlockDeletion.java
index 2524de6..94cdf61 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestBlockDeletion.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestBlockDeletion.java
@@ -139,7 +139,9 @@ public class TestBlockDeletion {
Assert.assertTrue(verifyBlocksCreated(omKeyLocationInfoGroupList));
// No containers with deleted blocks
Assert.assertTrue(containerIdsWithDeletedBlocks.isEmpty());
- // Delete transactionIds for the containers should be 0
+ // Delete transactionIds for the containers should be 0.
+ // NOTE: this test assumes that all the containers are KeyValueContainers. If
+ // other container types are going to be added, this test should be checked.
matchContainerTransactionIds();
om.deleteKey(keyArgs);
Thread.sleep(5000);
@@ -215,8 +217,9 @@ public class TestBlockDeletion {
Assert.assertEquals(
scm.getContainerInfo(containerId).getDeleteTransactionId(), 0);
}
- Assert.assertEquals(dnContainerSet.getContainer(containerId)
- .getContainerData().getDeleteTransactionId(),
+ Assert.assertEquals(((KeyValueContainerData)dnContainerSet
+ .getContainer(containerId).getContainerData())
+ .getDeleteTransactionId(),
scm.getContainerInfo(containerId).getDeleteTransactionId());
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org