Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2015/07/14 01:06:44 UTC

hadoop git commit: HDFS-8677. Ozone: Introduce KeyValueContainerDatasetSpi. (Contributed by Arpit Agarwal)

Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7240 36da79818 -> 9e63be7ef


HDFS-8677. Ozone: Introduce KeyValueContainerDatasetSpi. (Contributed by Arpit Agarwal)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9e63be7e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9e63be7e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9e63be7e

Branch: refs/heads/HDFS-7240
Commit: 9e63be7ef8c862b50af7b8b513f2da96463d27f8
Parents: 36da798
Author: Arpit Agarwal <ar...@apache.org>
Authored: Mon Jul 13 16:06:13 2015 -0700
Committer: Arpit Agarwal <ar...@apache.org>
Committed: Mon Jul 13 16:06:13 2015 -0700

----------------------------------------------------------------------
 .../hadoop-hdfs/CHANGES-HDFS-7240.txt           |   2 +
 .../server/datanode/dataset/DatasetSpi.java     |   2 +-
 .../KeyValueContainer.java                      | 155 +++++++++++++++++++
 .../KeyValueContainerDatasetSpi.java            | 145 +++++++++++++++++
 .../KeyValueContainerIterator.java              |  99 ++++++++++++
 .../exceptions/BadGenerationStampException.java |  40 +++++
 .../exceptions/BadTransactionIdException.java   |  39 +++++
 .../exceptions/BlobAlreadyExistsException.java  |  35 +++++
 .../ContainerAlreadyExistsException.java        |  36 +++++
 .../StorageContainerNotFoundException.java      |  36 +++++
 10 files changed, 588 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-7240.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-7240.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-7240.txt
index ee85328..a17c624 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-7240.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-7240.txt
@@ -31,3 +31,5 @@
     HDFS-8680. OzoneHandler : Add Local StorageHandler support for volumes.
     (Anu Engineer via Arpit Agarwal)
 
+    HDFS-8677. Ozone: Introduce KeyValueContainerDatasetSpi. (Arpit Agarwal)
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/dataset/DatasetSpi.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/dataset/DatasetSpi.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/dataset/DatasetSpi.java
index a210155..38293fa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/dataset/DatasetSpi.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/dataset/DatasetSpi.java
@@ -50,7 +50,7 @@ import java.util.Set;
 @InterfaceStability.Unstable
 public interface DatasetSpi<V extends VolumeSpi> {
   /**
-   * A factory for creating {@link FsDatasetSpi} objects.
+   * A factory for creating {@link DatasetSpi} objects.
    */
   abstract class Factory {
     /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainer.java
new file mode 100644
index 0000000..15c7e69
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainer.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdfs.server.datanode.dataset.VolumeSpi;
+import org.apache.hadoop.storagecontainer.protocol.StorageContainer;
+
+import java.io.IOException;
+
+/**
+ * Interface to an internal key-value storage container. Allows updating and
+ * querying the contents of the container.
+ */
+@InterfaceAudience.Private
+public interface KeyValueContainer {
+  void init(StorageContainer containerKey) throws IOException;
+
+  /**
+   * Return the underlying storage volume for this container.
+   *
+   * @return underlying storage volume for this container.
+   */
+  VolumeSpi getVolume();
+
+  /**
+   * Get a new iterator to the container contents. The iterator must be
+   * initialized via {@link KeyValueContainerIterator#seek(byte[])} or
+   * {@link KeyValueContainerIterator#seekToFirst()} to be valid.
+   *
+   * @return an iterator over known key-value pairs in the container.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  KeyValueContainerIterator getIterator() throws IOException;
+
+  /**
+   * Atomically insert a key-value pair into the container. If the insert is
+   * successful then the transaction id of the container is updated.
+   *
+   * If the key already exists then its value is updated. This can be
+   * thought of as an upsert operation.
+   *
+   * Insertion and the transaction id update must be atomic, i.e. both or neither.
+   *
+   * @param txid Transaction id corresponding to the operation.
+   * @param key key to insert into the container.
+   * @param value value corresponding to the given key.
+   *
+   * @return reference to this object to allow chaining calls.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  KeyValueContainer put(long txid, byte[] key, byte[] value)
+      throws IOException;
+
+  /**
+   * Retrieve the value associated with the given key.
+   *
+   * @param key key whose value is to be retrieved.
+   *
+   * @return value associated with the given key.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  byte[] get(byte[] key) throws IOException;
+
+  /**
+   * Remove the key and its associated value from the container. If the
+   * remove is successful then the transaction id of the container is
+   * updated.
+   *
+   * Removal and the transaction id update must be atomic, i.e. both or neither.
+   *
+   * @param txid Transaction id corresponding to the operation.
+   * @param key key to remove from the container.
+   *
+   * @return reference to this object to allow chaining calls.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  KeyValueContainer remove(long txid, byte[] key) throws IOException;
+
+  /**
+   * Delete the container if it is empty.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  void delete() throws IOException;
+
+  /**
+   * Close the container, releasing any in-memory resources associated with
+   * the container.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  void close() throws IOException;
+
+  /**
+   * Retrieve the container ID.
+   * @return the container ID.
+   */
+  long getContainerId();
+
+  /**
+   * Return the generation stamp associated with the container.
+   *
+   * @return generation stamp associated with the container.
+   */
+  long getGenerationStamp();
+
+  /**
+   * Update the generation stamp associated with the container.
+   *
+   * @param newGenerationStamp the new generation stamp for the container.
+   *
+   * @return reference to this object to allow chaining calls.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  KeyValueContainer updateGenerationStamp(long newGenerationStamp)
+      throws IOException;
+
+  /**
+   * Retrieve the transaction id associated with the last successful
+   * update to this container.
+   *
+   * @return transaction id associated with the last successful update.
+   */
+  long getTransactionId();
+
+  /**
+   * Retrieve the key associated with this container. The key is a dummy
+   * StorageContainer object.
+   *
+   * @return container key
+   */
+  StorageContainer toContainerKey();
+}

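For readers skimming the interface above, a minimal usage sketch follows. It is not part of the patch: the container instance, the sample key/value payloads and the caller-supplied transaction ids are all assumptions made for illustration.

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.KeyValueContainer;

// Illustrative sketch only; "container" is assumed to come from a
// KeyValueContainerDatasetSpi implementation (next file in this diff).
class KeyValueContainerUsageSketch {
  static void exercise(KeyValueContainer container) throws IOException {
    byte[] key = "volume/bucket/object".getBytes(StandardCharsets.UTF_8);
    byte[] value = "object-metadata".getBytes(StandardCharsets.UTF_8);

    // put() behaves as an upsert; the caller supplies the transaction id and
    // the insert plus the transaction id update are atomic per the contract.
    container.put(101L, key, value);

    byte[] stored = container.get(key);            // exact-key lookup
    container.remove(102L, key);                   // removal also advances the txid

    long lastTxId = container.getTransactionId();  // 102L after the calls above
    container.close();                             // release in-memory resources
  }
}
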
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainerDatasetSpi.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainerDatasetSpi.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainerDatasetSpi.java
new file mode 100644
index 0000000..b2f3ba5
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainerDatasetSpi.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdfs.server.datanode.dataset.*;
+import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions.*;
+import org.apache.hadoop.storagecontainer.protocol.StorageContainer;
+
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.util.Set;
+import java.util.UUID;
+
+/**
+ * Interface abstracting storage operations to support containers. Exposes
+ * the following two storage primitives:
+ *
+ *  1. Key-value containers that support atomic put, delete and replace and
+ *     support lexical iteration. The length of individual keys and values
+ *     depends on the specific implementation, but it is expected to be
+ *     limited to a few KB.
+ *
+ *  2. Named blobs which can be up to a few GB in size. The exact limit is
+ *     left to implementors. Blobs support streaming writes but not atomic
+ *     puts.
+ */
+@InterfaceAudience.Private
+public interface KeyValueContainerDatasetSpi<V extends VolumeSpi>
+    extends DatasetSpi<V> {
+
+  /**
+   * Create a key-value container.
+   *
+   * @param containerKey key identifying the container. The id, generation
+   *                     stamp and blockpoolId fields must be initialized.
+   *
+   * @return a reference to the newly created container.
+   *
+   * @throws ContainerAlreadyExistsException if the container with a matching
+   *                                         [id+blockpoolId] combination is
+   *                                         already present.
+   * @throws IOException if the operation failed for any other reason.
+   */
+  KeyValueContainer createContainer(StorageContainer containerKey)
+      throws IOException, ContainerAlreadyExistsException;
+
+  /**
+   * Return a read-only set view of known key-value containers in a
+   * given block pool. The set of containers can change while an
+   * iteration is in progress, causing some newly added containers to
+   * be omitted from the iteration.
+   *
+   * @param bpid the block pool ID in which to enumerate containers.
+   *
+   * @return a read only set view of all known key-value containers.
+   */
+  Set<StorageContainer> getContainerSet(String bpid);
+
+  /**
+   * Lookup a key-value container. The provided generation stamp and
+   * transactionId must match the latest values known to the dataset.
+   *
+   * @param containerKey key identifying the container. The id, generation
+   *                     stamp, blockpoolId and transactionId fields must
+   *                     be initialized.
+   *
+   * @return a reference to an existing container.
+   *
+   * @throws StorageContainerNotFoundException if the container was not found.
+   * @throws BadGenerationStampException if the provided generation stamp does
+   *                                     not match what we have.
+   * @throws BadTransactionIdException if the provided transactionId does not
+   *                                   match what we have.
+   * @throws IOException if the operation failed for any other reason.
+   */
+  KeyValueContainer lookupContainer(StorageContainer containerKey)
+      throws BadGenerationStampException, BadTransactionIdException,
+             StorageContainerNotFoundException, IOException;
+
+  /**
+   * Delete a storage container. This deletes all key-value pairs in
+   * the container. All container resources must be disposed of before
+   * calling deleteContainer.
+   *
+   * The caller must ensure that a reference to the container is not in
+   * use before attempting to delete the container.
+   *
+   * @throws StorageContainerNotFoundException if the container was not found.
+   * @throws IOException if the operation failed for any other reason.
+   */
+  void deleteContainer(StorageContainer container)
+      throws StorageContainerNotFoundException, IOException;
+
+  /**
+   * Create a blob. Returns a FileChannel that can be used to write data
+   * into the blob. BlobIds must be unique across the storage service.
+   *
+   * @return a Channel for async writes into the blob. Any attempts
+   *         to read from the channel will throw an exception.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  FileChannel createBlob(String bpid, UUID blobId) throws IOException;
+
+  /**
+   * Open a channel to read blob data.
+   *
+   * @return a Channel for async reads from the blob. Any attempts
+   *         to write to the channel will throw an exception.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  FileChannel openBlob(String bpid, UUID blobId) throws IOException;
+
+  /**
+   * Delete a blob.
+   */
+  void deleteBlob(String bpid, UUID blobId) throws IOException;
+
+  /**
+   * Return a read-only set view of known key-value blobs. The
+   * set of blobs can change while an iteration is in progress,
+   * causing some newly added blobs to be omitted from the iteration.
+   *
+   * @return a read-only set view of all known blobs.
+   */
+  Set<UUID> getBlobs(String bpid);
+}

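A sketch of how a caller might drive this interface end to end. How the StorageContainer key is constructed and where the dataset instance comes from are not specified by this patch, so both are left as parameters and assumed to be supplied by the caller.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.UUID;

import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.KeyValueContainer;
import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.KeyValueContainerDatasetSpi;
import org.apache.hadoop.storagecontainer.protocol.StorageContainer;

// Illustrative sketch only; dataset, containerKey and bpid are assumed inputs.
class KeyValueContainerDatasetSketch {
  static void exercise(KeyValueContainerDatasetSpi<?> dataset,
                       StorageContainer containerKey,
                       String bpid) throws IOException {
    // Container primitive: create, then look up by the same key. A second
    // createContainer() call for the same [id+blockpoolId] combination would
    // throw ContainerAlreadyExistsException.
    KeyValueContainer created = dataset.createContainer(containerKey);
    KeyValueContainer found = dataset.lookupContainer(containerKey);

    // Blob primitive: stream bytes in through a write-only channel ...
    UUID blobId = UUID.randomUUID();
    try (FileChannel out = dataset.createBlob(bpid, blobId)) {
      out.write(ByteBuffer.wrap(new byte[] {1, 2, 3}));
    }

    // ... read them back through a read-only channel, then clean up.
    try (FileChannel in = dataset.openBlob(bpid, blobId)) {
      ByteBuffer buf = ByteBuffer.allocate(3);
      in.read(buf);
    }
    dataset.deleteBlob(bpid, blobId);
  }
}
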
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainerIterator.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainerIterator.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainerIterator.java
new file mode 100644
index 0000000..47a159e
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/KeyValueContainerIterator.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+import java.io.IOException;
+import java.util.NoSuchElementException;
+
+
+/**
+ * A lexical iterator over a storage container of key-value pairs. The
+ * iterator must be positioned using {@link #seek(byte[])} or
+ * {@link #seekToFirst()} before the first use.
+ *
+ * Each iterator *must* be disposed of by invoking {@link #dispose()} when
+ * done using it; otherwise the implementation may leak resources.
+ */
+@InterfaceAudience.Private
+public interface KeyValueContainerIterator {
+
+  /**
+   * Position the iterator at the beginning of the container.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  void seekToFirst() throws IOException;
+
+  /**
+   * Position the iterator at or just before the specified key.
+   * If the key exists then a subsequent {@link #nextKey()} call
+   * must return the same key.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  void seek(byte[] key) throws IOException;
+
+  /**
+   * Release the resources associated with this iterator.
+   *
+   * @throws IOException if the operation failed for any reason.
+   */
+  void dispose() throws IOException;
+
+  /**
+   * Return true if there is an element beyond the current key.
+   *
+   * @return true if there is an element beyond the current key.
+   */
+  boolean hasNext();
+
+  /**
+   * Advance the iterator by one element.
+   *
+   * @throws NoSuchElementException if there are no more keys in the
+   *         container.
+   */
+  void next();
+
+  /**
+   * Retrieve the key at the iterator position. If the iterator was
+   * positioned at a non-existent key via {@link #seek(byte[])}, the next
+   * key in lexical order is returned.
+   *
+   * @return byte representation of the key corresponding to the next
+   *         key-value pair in the container.
+   *
+   * @throws NoSuchElementException if there are no more keys in the
+   *         container.
+   */
+  byte[] nextKey();
+
+  /**
+   * Retrieve the value associated with the next key in the container.
+   *
+   * @return byte representation of the value corresponding to the next
+   *         key-value pair in the container.
+   *
+   * @throws NoSuchElementException if there are no more keys in the
+   *         container.
+   */
+  byte[] nextValue();
+}

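A sketch of one plausible iteration pattern over a container. The interface does not pin down the exact hasNext()/next()/nextKey() protocol, so the ordering below (read the pair at the current position, then advance) is an assumption and is flagged as such in the comments.

import java.io.IOException;

import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.KeyValueContainer;
import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.KeyValueContainerIterator;

// Illustrative sketch only; assumes hasNext() reports whether a key-value
// pair can be read at the current position after seekToFirst()/next().
class KeyValueContainerIterationSketch {
  static void dump(KeyValueContainer container) throws IOException {
    KeyValueContainerIterator iter = container.getIterator();
    try {
      iter.seekToFirst();                 // must be positioned before first use
      while (iter.hasNext()) {
        byte[] key = iter.nextKey();      // key at the current position
        byte[] value = iter.nextValue();  // value of the same pair
        // ... process key/value ...
        iter.next();                      // advance to the following pair
      }
    } finally {
      iter.dispose();                     // required, else resources may leak
    }
  }
}
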
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BadGenerationStampException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BadGenerationStampException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BadGenerationStampException.java
new file mode 100644
index 0000000..f18bfc4
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BadGenerationStampException.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions;
+
+import java.io.IOException;
+
+
+/**
+ * Exception thrown when the expected and actual generation stamp of a
+ * container do not match. It indicates that either the caller or the
+ * dataset has outdated state.
+ */
+public class BadGenerationStampException extends IOException {
+
+  private static final long serialVersionUID = 0L;
+
+  /**
+   * A BadGenerationStampException with a detail message.
+   */
+  public BadGenerationStampException(long containerId, long expected, long requested) {
+    super("For containerId " + containerId + ", expected generation stamp " +
+              expected + ", requested generation stamp = " + requested);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BadTransactionIdException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BadTransactionIdException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BadTransactionIdException.java
new file mode 100644
index 0000000..5323448
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BadTransactionIdException.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions;
+
+import java.io.IOException;
+
+/**
+ * Exception thrown when the expected and actual last transactionId of a
+ * container do not match. It indicates that either the caller or the
+ * dataset has outdated state.
+ */
+public class BadTransactionIdException extends IOException {
+
+  private static final long serialVersionUID = 0L;
+
+  /**
+   * A BadTransactionIdException with a detail message.
+   */
+  public BadTransactionIdException(long containerId, long expected, long requested) {
+    super("For containerId " + containerId + ", expected transactionId " +
+              expected + ", did not match received transactionId " + requested);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BlobAlreadyExistsException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BlobAlreadyExistsException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BlobAlreadyExistsException.java
new file mode 100644
index 0000000..cd2d232
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/BlobAlreadyExistsException.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions;
+
+
+import java.io.IOException;
+import java.util.UUID;
+
+public class BlobAlreadyExistsException extends IOException {
+
+  private static final long serialVersionUID = 0L;
+
+  /**
+   * A BlobAlreadyExistsException with a detail message.
+   */
+  public BlobAlreadyExistsException(String bpid, UUID blobId) {
+    super("Blob " + blobId + " in BlockPool " + bpid + " already exists");
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/ContainerAlreadyExistsException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/ContainerAlreadyExistsException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/ContainerAlreadyExistsException.java
new file mode 100644
index 0000000..c53baba
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/ContainerAlreadyExistsException.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions;
+
+import org.apache.hadoop.storagecontainer.protocol.StorageContainer;
+
+import java.io.IOException;
+
+
+public class ContainerAlreadyExistsException extends IOException {
+  private static final long serialVersionUID = 0L;
+
+  public ContainerAlreadyExistsException(StorageContainer container) {
+    super("Container " + container + " already exists.");
+  }
+
+  public ContainerAlreadyExistsException(StorageContainer container, Throwable cause) {
+    super("Container " + container + " already exists.", cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e63be7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/StorageContainerNotFoundException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/StorageContainerNotFoundException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/StorageContainerNotFoundException.java
new file mode 100644
index 0000000..a516806
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/keyvaluecontainerdataset/exceptions/StorageContainerNotFoundException.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions;
+
+import java.io.IOException;
+
+
+public class StorageContainerNotFoundException extends IOException {
+
+  private static final long serialVersionUID = 0L;
+
+  /**
+   * A StorageContainerNotFoundException with a detail message.
+   * @param containerId id of the container that was not found.
+   * @param bpid id of the block pool that was searched.
+   */
+  public StorageContainerNotFoundException(long containerId, String bpid) {
+    super("ContainerId " + containerId + " in BlockPool " + bpid + " was not found");
+  }
+}
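
Finally, a sketch of how a caller might distinguish these failure modes around lookupContainer(). The return-null-on-not-found and rethrow-on-stale-state policy is an assumption chosen for the example, not something the patch prescribes.

import java.io.IOException;

import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.KeyValueContainer;
import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.KeyValueContainerDatasetSpi;
import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions.BadGenerationStampException;
import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions.BadTransactionIdException;
import org.apache.hadoop.hdfs.server.datanode.keyvaluecontainerdataset.exceptions.StorageContainerNotFoundException;
import org.apache.hadoop.storagecontainer.protocol.StorageContainer;

// Illustrative sketch only; the error-handling policy is an assumed example.
class LookupErrorHandlingSketch {
  static KeyValueContainer lookupOrNull(KeyValueContainerDatasetSpi<?> dataset,
                                        StorageContainer containerKey)
      throws IOException {
    try {
      return dataset.lookupContainer(containerKey);
    } catch (StorageContainerNotFoundException e) {
      return null;  // the container does not exist in this block pool
    } catch (BadGenerationStampException | BadTransactionIdException e) {
      // The caller's view of the container is stale; it should refresh the
      // generation stamp / transaction id it holds and retry.
      throw e;
    }
  }
}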