You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by bh...@apache.org on 2018/07/09 20:33:34 UTC
[25/37] hadoop git commit: HDDS-176. Add keyCount and container
maximum size to ContainerData. Contributed by Bharat Viswanadham.
HDDS-176. Add keyCount and container maximum size to ContainerData. Contributed by Bharat Viswanadham.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e1f4b3b5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e1f4b3b5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e1f4b3b5
Branch: refs/heads/trunk
Commit: e1f4b3b560a9ec2b34bb9ffbfe71fd3b0ac48120
Parents: 44b091a
Author: Hanisha Koneru <ha...@apache.org>
Authored: Tue Jul 3 09:53:41 2018 -0700
Committer: Hanisha Koneru <ha...@apache.org>
Committed: Tue Jul 3 09:53:41 2018 -0700
----------------------------------------------------------------------
.../container/common/impl/ContainerData.java | 50 +++++++++++++++++++-
.../common/impl/ContainerDataYaml.java | 10 ++--
.../states/endpoint/VersionEndpointTask.java | 3 --
.../container/keyvalue/KeyValueContainer.java | 8 +---
.../keyvalue/KeyValueContainerData.java | 20 +++++---
.../container/keyvalue/KeyValueHandler.java | 14 ++++--
.../keyvalue/helpers/KeyValueContainerUtil.java | 1 +
.../container/keyvalue/impl/KeyManagerImpl.java | 6 +++
.../container/keyvalue/impl/package-info.java | 22 +++++++++
.../container/ozoneimpl/ContainerReader.java | 21 +++++---
.../common/TestKeyValueContainerData.java | 8 +++-
.../common/impl/TestContainerDataYaml.java | 6 ++-
.../container/common/impl/TestContainerSet.java | 4 +-
.../keyvalue/TestChunkManagerImpl.java | 2 +-
.../container/keyvalue/TestKeyManagerImpl.java | 7 ++-
.../keyvalue/TestKeyValueContainer.java | 2 +-
.../container/ozoneimpl/TestOzoneContainer.java | 2 +-
.../test/resources/additionalfields.container | 1 +
.../src/test/resources/incorrect.container | 1 +
19 files changed, 147 insertions(+), 41 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
index 872d958..238fb09 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerData.java
@@ -53,12 +53,15 @@ public class ContainerData {
// State of the Container
private ContainerLifeCycleState state;
+ private final int maxSizeGB;
+
/** parameters for read/write statistics on the container. **/
private final AtomicLong readBytes;
private final AtomicLong writeBytes;
private final AtomicLong readCount;
private final AtomicLong writeCount;
private final AtomicLong bytesUsed;
+ private final AtomicLong keyCount;
private HddsVolume volume;
@@ -67,8 +70,9 @@ public class ContainerData {
* Creates a ContainerData Object, which holds metadata of the container.
* @param type - ContainerType
* @param containerId - ContainerId
+ * @param size - container maximum size
*/
- public ContainerData(ContainerType type, long containerId) {
+ public ContainerData(ContainerType type, long containerId, int size) {
this.containerType = type;
this.containerId = containerId;
this.layOutVersion = ChunkLayOutVersion.getLatestVersion().getVersion();
@@ -79,6 +83,8 @@ public class ContainerData {
this.writeCount = new AtomicLong(0L);
this.writeBytes = new AtomicLong(0L);
this.bytesUsed = new AtomicLong(0L);
+ this.keyCount = new AtomicLong(0L);
+ this.maxSizeGB = size;
}
/**
@@ -86,9 +92,10 @@ public class ContainerData {
* @param type - ContainerType
* @param containerId - ContainerId
* @param layOutVersion - Container layOutVersion
+ * @param size - Container maximum size
*/
public ContainerData(ContainerType type, long containerId, int
- layOutVersion) {
+ layOutVersion, int size) {
this.containerType = type;
this.containerId = containerId;
this.layOutVersion = layOutVersion;
@@ -99,6 +106,8 @@ public class ContainerData {
this.writeCount = new AtomicLong(0L);
this.writeBytes = new AtomicLong(0L);
this.bytesUsed = new AtomicLong(0L);
+ this.keyCount = new AtomicLong(0L);
+ this.maxSizeGB = size;
}
/**
@@ -134,6 +143,14 @@ public class ContainerData {
}
/**
+ * Returns the maximum size of the container in GB.
+ * @return maxSizeGB
+ */
+ public int getMaxSizeGB() {
+ return maxSizeGB;
+ }
+
+ /**
* Returns the layOutVersion of the actual container data format.
* @return layOutVersion
*/
@@ -309,5 +326,34 @@ public class ContainerData {
return volume;
}
+ /**
+ * Increments the number of keys in the container.
+ */
+ public void incrKeyCount() {
+ this.keyCount.incrementAndGet();
+ }
+
+ /**
+ * Decrements number of keys in the container.
+ */
+ public void decrKeyCount() {
+ this.keyCount.decrementAndGet();
+ }
+
+ /**
+ * Returns number of keys in the container.
+ * @return key count
+ */
+ public long getKeyCount() {
+ return this.keyCount.get();
+ }
+
+ /**
+ * Sets the number of keys in the container.
+ * @param count
+ */
+ public void setKeyCount(long count) {
+ this.keyCount.set(count);
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java
index 6b8e6ee..4f4d588 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerDataYaml.java
@@ -32,6 +32,7 @@ import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.File;
+import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.Map;
@@ -47,7 +48,6 @@ import org.yaml.snakeyaml.nodes.ScalarNode;
import org.yaml.snakeyaml.nodes.Tag;
import org.yaml.snakeyaml.representer.Representer;
-import static org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData.YAML_FIELDS;
import static org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData.YAML_TAG;
/**
@@ -150,10 +150,11 @@ public final class ContainerDataYaml {
// When a new Container type is added, we need to add what fields need
// to be filtered here
if (type.equals(KeyValueContainerData.class)) {
+ List<String> yamlFields = KeyValueContainerData.getYamlFields();
// filter properties
for (Property prop : set) {
String name = prop.getName();
- if (YAML_FIELDS.contains(name)) {
+ if (yamlFields.contains(name)) {
filtered.add(prop);
}
}
@@ -183,9 +184,12 @@ public final class ContainerDataYaml {
long layOutVersion = (long) nodes.get("layOutVersion");
int lv = (int) layOutVersion;
+ long size = (long) nodes.get("maxSizeGB");
+ int maxSize = (int) size;
+
//When a new field is added, it needs to be added here.
KeyValueContainerData kvData = new KeyValueContainerData((long) nodes
- .get("containerId"), lv);
+ .get("containerId"), lv, maxSize);
kvData.setContainerDBType((String)nodes.get("containerDBType"));
kvData.setMetadataPath((String) nodes.get(
"metadataPath"));
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java
index e4cb4d5..d782b59 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/VersionEndpointTask.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.ozone.container.common.states.endpoint;
import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto;
import org.apache.hadoop.ozone.OzoneConsts;
@@ -30,7 +29,6 @@ import org.apache.hadoop.ozone.container.ozoneimpl.OzoneContainer;
import org.apache.hadoop.ozone.protocol.VersionResponse;
import java.io.IOException;
-import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
@@ -67,7 +65,6 @@ public class VersionEndpointTask implements
rpcEndPoint.setVersion(response);
VolumeSet volumeSet = ozoneContainer.getVolumeSet();
Map<String, HddsVolume> volumeMap = volumeSet.getVolumeMap();
- List<HddsProtos.KeyValue> keyValues = versionResponse.getKeysList();
String scmId = response.getValue(OzoneConsts.SCM_ID);
String clusterId = response.getValue(OzoneConsts.CLUSTER_ID);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java
index 72d50d6..474c625 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainer.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerLifeCycleState;
-import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.container.common.helpers
.StorageContainerException;
import org.apache.hadoop.io.IOUtils;
@@ -84,7 +83,6 @@ public class KeyValueContainer implements Container {
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
private final KeyValueContainerData containerData;
- private long containerMaxSize;
private Configuration config;
public KeyValueContainer(KeyValueContainerData containerData, Configuration
@@ -95,9 +93,6 @@ public class KeyValueContainer implements Container {
"be null");
this.config = ozoneConfig;
this.containerData = containerData;
- this.containerMaxSize = (long) ozoneConfig.getInt(ScmConfigKeys
- .OZONE_SCM_CONTAINER_SIZE_GB, ScmConfigKeys
- .OZONE_SCM_CONTAINER_SIZE_DEFAULT) * 1024L * 1024L * 1024L;
}
@Override
@@ -111,9 +106,10 @@ public class KeyValueContainer implements Container {
File containerMetaDataPath = null;
//acquiring volumeset lock and container lock
volumeSet.acquireLock();
+ long maxSize = (containerData.getMaxSizeGB() * 1024L * 1024L * 1024L);
try {
HddsVolume containerVolume = volumeChoosingPolicy.chooseVolume(volumeSet
- .getVolumesList(), containerMaxSize);
+ .getVolumesList(), maxSize);
String containerBasePath = containerVolume.getHddsRootDir().toString();
long containerId = containerData.getContainerId();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java
index 3b24468..ed2c6af 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerData.java
@@ -39,9 +39,9 @@ public class KeyValueContainerData extends ContainerData {
public static final Tag YAML_TAG = new Tag("KeyValueContainerData");
// Fields need to be stored in .container file.
- public static final List<String> YAML_FIELDS = Lists.newArrayList(
+ private static final List<String> YAML_FIELDS = Lists.newArrayList(
"containerType", "containerId", "layOutVersion", "state", "metadata",
- "metadataPath", "chunksPath", "containerDBType");
+ "metadataPath", "chunksPath", "containerDBType", "maxSizeGB");
// Path to Container metadata Level DB/RocksDB Store and .container file.
private String metadataPath;
@@ -60,9 +60,10 @@ public class KeyValueContainerData extends ContainerData {
/**
* Constructs KeyValueContainerData object.
* @param id - ContainerId
+ * @param size - maximum size of the container
*/
- public KeyValueContainerData(long id) {
- super(ContainerProtos.ContainerType.KeyValueContainer, id);
+ public KeyValueContainerData(long id, int size) {
+ super(ContainerProtos.ContainerType.KeyValueContainer, id, size);
this.numPendingDeletionBlocks = 0;
}
@@ -70,10 +71,11 @@ public class KeyValueContainerData extends ContainerData {
* Constructs KeyValueContainerData object.
* @param id - ContainerId
* @param layOutVersion
+ * @param size - maximum size of the container
*/
- public KeyValueContainerData(long id,
- int layOutVersion) {
- super(ContainerProtos.ContainerType.KeyValueContainer, id, layOutVersion);
+ public KeyValueContainerData(long id, int layOutVersion, int size) {
+ super(ContainerProtos.ContainerType.KeyValueContainer, id, layOutVersion,
+ size);
this.numPendingDeletionBlocks = 0;
}
@@ -205,4 +207,8 @@ public class KeyValueContainerData extends ContainerData {
return builder.build();
}
+
+ public static List<String> getYamlFields() {
+ return YAML_FIELDS;
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
index d174383..b615acd 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.PutSmallFileRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.Type;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.container.common.helpers
.StorageContainerException;
import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
@@ -104,6 +105,7 @@ public class KeyValueHandler extends Handler {
private final KeyManager keyManager;
private final ChunkManager chunkManager;
private VolumeChoosingPolicy volumeChoosingPolicy;
+ private final int maxContainerSizeGB;
// TODO : Add metrics and populate it.
@@ -125,6 +127,8 @@ public class KeyValueHandler extends Handler {
chunkManager = new ChunkManagerImpl();
// TODO: Add support for different volumeChoosingPolicies.
volumeChoosingPolicy = new RoundRobinVolumeChoosingPolicy();
+ maxContainerSizeGB = config.getInt(ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_GB,
+ ScmConfigKeys.OZONE_SCM_CONTAINER_SIZE_DEFAULT);
}
@Override
@@ -207,7 +211,7 @@ public class KeyValueHandler extends Handler {
}
KeyValueContainerData newContainerData = new KeyValueContainerData(
- containerID);
+ containerID, maxContainerSizeGB);
// TODO: Add support to add metadataList to ContainerData. Add metadata
// to container during creation.
KeyValueContainer newContainer = new KeyValueContainer(
@@ -565,8 +569,8 @@ public class KeyValueHandler extends Handler {
try {
checkContainerOpen(kvContainer);
- BlockID blockID = BlockID.getFromProtobuf(
- putSmallFileReq.getKey().getKeyData().getBlockID());
+ BlockID blockID = BlockID.getFromProtobuf(putSmallFileReq.getKey()
+ .getKeyData().getBlockID());
KeyData keyData = KeyData.getFromProtoBuf(
putSmallFileReq.getKey().getKeyData());
Preconditions.checkNotNull(keyData);
@@ -613,8 +617,8 @@ public class KeyValueHandler extends Handler {
GetSmallFileRequestProto getSmallFileReq = request.getGetSmallFile();
try {
- BlockID blockID = BlockID.getFromProtobuf(
- getSmallFileReq.getKey().getBlockID());
+ BlockID blockID = BlockID.getFromProtobuf(getSmallFileReq.getKey()
+ .getBlockID());
KeyData responseData = keyManager.getKey(kvContainer, blockID);
ContainerProtos.ChunkInfo chunkInfo = null;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/helpers/KeyValueContainerUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/helpers/KeyValueContainerUtil.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/helpers/KeyValueContainerUtil.java
index 029e94d..4c17dce 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/helpers/KeyValueContainerUtil.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/helpers/KeyValueContainerUtil.java
@@ -302,6 +302,7 @@ public final class KeyValueContainerUtil {
}
}).sum();
containerData.setBytesUsed(bytesUsed);
+ containerData.setKeyCount(liveKeys.size());
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyManagerImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyManagerImpl.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyManagerImpl.java
index 40736e5..6a8897a 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyManagerImpl.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyManagerImpl.java
@@ -84,6 +84,9 @@ public class KeyManagerImpl implements KeyManager {
Preconditions.checkNotNull(db, "DB cannot be null here");
db.put(Longs.toByteArray(data.getLocalID()), data.getProtoBufMessage()
.toByteArray());
+
+ // Increment keycount here
+ container.getContainerData().incrKeyCount();
}
/**
@@ -148,6 +151,9 @@ public class KeyManagerImpl implements KeyManager {
NO_SUCH_KEY);
}
db.delete(kKey);
+
+ // Decrement keycount here
+ container.getContainerData().decrKeyCount();
}
/**
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/package-info.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/package-info.java
new file mode 100644
index 0000000..525d51b
--- /dev/null
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.keyvalue.impl;
+/**
+ This package contains chunk manager and key manager implementation for
+ keyvalue container type.
+ **/
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/ContainerReader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/ContainerReader.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/ContainerReader.java
index 68823bc..50a2d08 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/ContainerReader.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/ContainerReader.java
@@ -109,11 +109,9 @@ public class ContainerReader implements Runnable {
.getContainerFile(metadataPath, containerName);
File checksumFile = KeyValueContainerLocationUtil
.getContainerCheckSumFile(metadataPath, containerName);
- File dbFile = KeyValueContainerLocationUtil
- .getContainerDBFile(metadataPath, containerName);
- if (containerFile.exists() && checksumFile.exists() &&
- dbFile.exists()) {
- verifyContainerFile(containerFile, checksumFile, dbFile);
+ if (containerFile.exists() && checksumFile.exists()) {
+ verifyContainerFile(containerName, containerFile,
+ checksumFile);
} else {
LOG.error("Missing container metadata files for Container: " +
"{}", containerName);
@@ -129,8 +127,8 @@ public class ContainerReader implements Runnable {
}
}
- private void verifyContainerFile(File containerFile, File checksumFile,
- File dbFile) {
+ private void verifyContainerFile(String containerName, File containerFile,
+ File checksumFile) {
try {
ContainerData containerData = ContainerDataYaml.readContainerFile(
containerFile);
@@ -139,6 +137,15 @@ public class ContainerReader implements Runnable {
case KeyValueContainer:
KeyValueContainerData keyValueContainerData = (KeyValueContainerData)
containerData;
+ File dbFile = KeyValueContainerLocationUtil
+ .getContainerDBFile(new File(containerFile.getParent()),
+ containerName);
+ if (!dbFile.exists()) {
+ LOG.error("Container DB file is missing for Container {}, skipping " +
+ "this", containerName);
+ // Don't further process this container, as it is missing db file.
+ return;
+ }
KeyValueContainerUtil.parseKeyValueContainerData(keyValueContainerData,
containerFile, checksumFile, dbFile, config);
KeyValueContainer keyValueContainer = new KeyValueContainer(
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java
index 249b0fe..16c4c2a 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestKeyValueContainerData.java
@@ -31,6 +31,7 @@ import java.util.concurrent.atomic.AtomicLong;
*/
public class TestKeyValueContainerData {
+ private static final int MAXSIZE = 5;
@Test
public void testKeyValueData() {
long containerId = 1L;
@@ -42,7 +43,8 @@ public class TestKeyValueContainerData {
.ContainerLifeCycleState.CLOSED;
AtomicLong val = new AtomicLong(0);
- KeyValueContainerData kvData = new KeyValueContainerData(containerId);
+ KeyValueContainerData kvData = new KeyValueContainerData(containerId,
+ MAXSIZE);
assertEquals(containerType, kvData.getContainerType());
assertEquals(containerId, kvData.getContainerId());
@@ -54,6 +56,8 @@ public class TestKeyValueContainerData {
assertEquals(val.get(), kvData.getWriteBytes());
assertEquals(val.get(), kvData.getReadCount());
assertEquals(val.get(), kvData.getWriteCount());
+ assertEquals(val.get(), kvData.getKeyCount());
+ assertEquals(MAXSIZE, kvData.getMaxSizeGB());
kvData.setState(state);
kvData.setContainerDBType(containerDBType);
@@ -63,6 +67,7 @@ public class TestKeyValueContainerData {
kvData.incrWriteBytes(10);
kvData.incrReadCount();
kvData.incrWriteCount();
+ kvData.incrKeyCount();
assertEquals(state, kvData.getState());
assertEquals(containerDBType, kvData.getContainerDBType());
@@ -73,6 +78,7 @@ public class TestKeyValueContainerData {
assertEquals(10, kvData.getWriteBytes());
assertEquals(1, kvData.getReadCount());
assertEquals(1, kvData.getWriteCount());
+ assertEquals(1, kvData.getKeyCount());
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java
index e1b7bd2..41d8315 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java
@@ -37,6 +37,7 @@ import static org.junit.Assert.fail;
*/
public class TestContainerDataYaml {
+ private static final int MAXSIZE = 5;
@Test
public void testCreateContainerFile() throws IOException {
String path = new FileSystemTestHelper().getTestRootDir();
@@ -45,7 +46,8 @@ public class TestContainerDataYaml {
File filePath = new File(new FileSystemTestHelper().getTestRootDir());
filePath.mkdirs();
- KeyValueContainerData keyValueContainerData = new KeyValueContainerData(Long.MAX_VALUE);
+ KeyValueContainerData keyValueContainerData = new KeyValueContainerData(
+ Long.MAX_VALUE, MAXSIZE);
keyValueContainerData.setContainerDBType("RocksDB");
keyValueContainerData.setMetadataPath(path);
keyValueContainerData.setChunksPath(path);
@@ -72,6 +74,7 @@ public class TestContainerDataYaml {
.getState());
assertEquals(1, kvData.getLayOutVersion());
assertEquals(0, kvData.getMetadata().size());
+ assertEquals(MAXSIZE, kvData.getMaxSizeGB());
// Update ContainerData.
kvData.addMetadata("VOLUME", "hdfs");
@@ -101,6 +104,7 @@ public class TestContainerDataYaml {
assertEquals(2, kvData.getMetadata().size());
assertEquals("hdfs", kvData.getMetadata().get("VOLUME"));
assertEquals("ozone", kvData.getMetadata().get("OWNER"));
+ assertEquals(MAXSIZE, kvData.getMaxSizeGB());
FileUtil.fullyDelete(filePath);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerSet.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerSet.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerSet.java
index 55d6773..6ec1fe4 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerSet.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerSet.java
@@ -53,7 +53,7 @@ public class TestContainerSet {
ContainerProtos.ContainerLifeCycleState state = ContainerProtos
.ContainerLifeCycleState.CLOSED;
- KeyValueContainerData kvData = new KeyValueContainerData(containerId);
+ KeyValueContainerData kvData = new KeyValueContainerData(containerId, 5);
kvData.setState(state);
KeyValueContainer keyValueContainer = new KeyValueContainer(kvData, new
OzoneConfiguration());
@@ -163,7 +163,7 @@ public class TestContainerSet {
private ContainerSet createContainerSet() throws StorageContainerException {
ContainerSet containerSet = new ContainerSet();
for (int i=0; i<10; i++) {
- KeyValueContainerData kvData = new KeyValueContainerData(i);
+ KeyValueContainerData kvData = new KeyValueContainerData(i, 5);
if (i%2 == 0) {
kvData.setState(ContainerProtos.ContainerLifeCycleState.CLOSED);
} else {
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestChunkManagerImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestChunkManagerImpl.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestChunkManagerImpl.java
index 6becf39..760d873 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestChunkManagerImpl.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestChunkManagerImpl.java
@@ -79,7 +79,7 @@ public class TestChunkManagerImpl {
Mockito.when(volumeChoosingPolicy.chooseVolume(anyList(), anyLong()))
.thenReturn(hddsVolume);
- keyValueContainerData = new KeyValueContainerData(1L);
+ keyValueContainerData = new KeyValueContainerData(1L, 5);
keyValueContainer = new KeyValueContainer(keyValueContainerData, config);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyManagerImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyManagerImpl.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyManagerImpl.java
index 62d9382..a90cf80 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyManagerImpl.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyManagerImpl.java
@@ -79,7 +79,7 @@ public class TestKeyManagerImpl {
Mockito.when(volumeChoosingPolicy.chooseVolume(anyList(), anyLong()))
.thenReturn(hddsVolume);
- keyValueContainerData = new KeyValueContainerData(1L);
+ keyValueContainerData = new KeyValueContainerData(1L, 5);
keyValueContainer = new KeyValueContainer(
keyValueContainerData, config);
@@ -104,9 +104,11 @@ public class TestKeyManagerImpl {
@Test
public void testPutAndGetKey() throws Exception {
+ assertEquals(0, keyValueContainer.getContainerData().getKeyCount());
//Put Key
keyManager.putKey(keyValueContainer, keyData);
+ assertEquals(1, keyValueContainer.getContainerData().getKeyCount());
//Get Key
KeyData fromGetKeyData = keyManager.getKey(keyValueContainer,
keyData.getBlockID());
@@ -123,10 +125,13 @@ public class TestKeyManagerImpl {
@Test
public void testDeleteKey() throws Exception {
try {
+ assertEquals(0, keyValueContainer.getContainerData().getKeyCount());
//Put Key
keyManager.putKey(keyValueContainer, keyData);
+ assertEquals(1, keyValueContainer.getContainerData().getKeyCount());
//Delete Key
keyManager.deleteKey(keyValueContainer, blockID);
+ assertEquals(0, keyValueContainer.getContainerData().getKeyCount());
try {
keyManager.getKey(keyValueContainer, blockID);
fail("testDeleteKey");
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
index de5f432..e55ea57 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
@@ -86,7 +86,7 @@ public class TestKeyValueContainer {
Mockito.when(volumeChoosingPolicy.chooseVolume(anyList(), anyLong()))
.thenReturn(hddsVolume);
- keyValueContainerData = new KeyValueContainerData(1L);
+ keyValueContainerData = new KeyValueContainerData(1L, 5);
keyValueContainer = new KeyValueContainer(
keyValueContainerData, conf);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
index cf4bb62..26e1c77 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
@@ -66,7 +66,7 @@ public class TestOzoneContainer {
volumeChoosingPolicy = new RoundRobinVolumeChoosingPolicy();
for (int i=0; i<10; i++) {
- keyValueContainerData = new KeyValueContainerData(i);
+ keyValueContainerData = new KeyValueContainerData(i, 1);
keyValueContainer = new KeyValueContainer(
keyValueContainerData, conf);
keyValueContainer.create(volumeSet, volumeChoosingPolicy, scmId);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/resources/additionalfields.container
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/resources/additionalfields.container b/hadoop-hdds/container-service/src/test/resources/additionalfields.container
index b19ec44..9027538 100644
--- a/hadoop-hdds/container-service/src/test/resources/additionalfields.container
+++ b/hadoop-hdds/container-service/src/test/resources/additionalfields.container
@@ -5,6 +5,7 @@ containerId: 9223372036854775807
containerType: KeyValueContainer
metadataPath: /hdds/current/aed-fg4-hji-jkl/containerdir0/1
layOutVersion: 1
+maxSizeGB: 5
metadata: {OWNER: ozone, VOLUME: hdfs}
state: CLOSED
aclEnabled: true
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1f4b3b5/hadoop-hdds/container-service/src/test/resources/incorrect.container
----------------------------------------------------------------------
diff --git a/hadoop-hdds/container-service/src/test/resources/incorrect.container b/hadoop-hdds/container-service/src/test/resources/incorrect.container
index 0d2bfd1..6848484 100644
--- a/hadoop-hdds/container-service/src/test/resources/incorrect.container
+++ b/hadoop-hdds/container-service/src/test/resources/incorrect.container
@@ -5,5 +5,6 @@ containerId: 9223372036854775807
containerType: KeyValueContainer
metadataPath: /hdds/current/aed-fg4-hji-jkl/containerdir0/1
layOutVersion: 1
+maxSizeGB: 5
metadata: {OWNER: ozone, VOLUME: hdfs}
state: INVALID
\ No newline at end of file
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org