You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2017/12/05 21:46:38 UTC

hadoop git commit: HDFS-12713. [READ] Refactor FileRegion and BlockAliasMap to separate out HDFS metadata and PROVIDED storage metadata. Contributed by Ewan Higgs

Repository: hadoop
Updated Branches:
  refs/heads/HDFS-9806 5caec5cf7 -> 52f562486


HDFS-12713. [READ] Refactor FileRegion and BlockAliasMap to separate out HDFS metadata and PROVIDED storage metadata. Contributed by Ewan Higgs


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/52f56248
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/52f56248
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/52f56248

Branch: refs/heads/HDFS-9806
Commit: 52f5624861fc68493f3124ac83cd47d377b78e1f
Parents: 5caec5c
Author: Virajith Jalaparti <vi...@apache.org>
Authored: Tue Dec 5 13:46:30 2017 -0800
Committer: Virajith Jalaparti <vi...@apache.org>
Committed: Tue Dec 5 13:46:30 2017 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   | 10 +--
 .../AliasMapProtocolServerSideTranslatorPB.java | 10 +++
 ...yAliasMapProtocolClientSideTranslatorPB.java | 17 ++++-
 .../apache/hadoop/hdfs/protocolPB/PBHelper.java |  2 +-
 .../hdfs/server/aliasmap/InMemoryAliasMap.java  |  7 +-
 .../aliasmap/InMemoryAliasMapProtocol.java      |  7 ++
 .../aliasmap/InMemoryLevelDBAliasMapServer.java | 13 +++-
 .../blockmanagement/ProvidedStorageMap.java     |  8 ++-
 .../hadoop/hdfs/server/common/FileRegion.java   | 30 ++------
 .../common/blockaliasmap/BlockAliasMap.java     | 14 ++--
 .../impl/InMemoryLevelDBAliasMapClient.java     | 24 ++++++-
 .../impl/LevelDBFileRegionAliasMap.java         | 22 ++++--
 .../impl/TextFileRegionAliasMap.java            | 76 ++++++++++++--------
 .../fsdataset/impl/ProvidedVolumeImpl.java      | 41 ++++++-----
 .../hadoop/hdfs/server/namenode/NameNode.java   |  6 +-
 .../hdfs/server/protocol/NamespaceInfo.java     |  4 ++
 .../src/main/proto/AliasMapProtocol.proto       |  8 +++
 .../src/main/resources/hdfs-default.xml         | 23 +++++-
 .../blockmanagement/TestProvidedStorageMap.java |  4 +-
 .../impl/TestInMemoryLevelDBAliasMapClient.java | 41 +++++------
 .../impl/TestLevelDBFileRegionAliasMap.java     | 10 +--
 .../impl/TestLevelDbMockAliasMapClient.java     | 19 +++--
 .../impl/TestTextBlockAliasMap.java             | 55 +++++++++++---
 .../fsdataset/impl/TestProvidedImpl.java        |  9 ++-
 .../hdfs/server/namenode/FileSystemImage.java   |  4 ++
 .../hdfs/server/namenode/ImageWriter.java       | 14 +++-
 .../hdfs/server/namenode/NullBlockAliasMap.java |  6 +-
 .../hadoop/hdfs/server/namenode/TreePath.java   |  3 +-
 .../TestNameNodeProvidedImplementation.java     | 24 +++----
 29 files changed, 346 insertions(+), 165 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 7db0a8d..2ef2bf0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -342,17 +342,19 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final String DFS_PROVIDER_STORAGEUUID = "dfs.provided.storage.id";
   public static final String DFS_PROVIDER_STORAGEUUID_DEFAULT =  "DS-PROVIDED";
   public static final String DFS_PROVIDED_ALIASMAP_CLASS = "dfs.provided.aliasmap.class";
+  public static final String DFS_PROVIDED_ALIASMAP_LOAD_RETRIES = "dfs.provided.aliasmap.load.retries";
 
   public static final String DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER = "dfs.provided.aliasmap.text.delimiter";
   public static final String DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER_DEFAULT = ",";
 
-  public static final String DFS_PROVIDED_ALIASMAP_TEXT_READ_PATH = "dfs.provided.aliasmap.text.read.path";
-  public static final String DFS_PROVIDED_ALIASMAP_TEXT_PATH_DEFAULT = "file:///tmp/blocks.csv";
+  public static final String DFS_PROVIDED_ALIASMAP_TEXT_READ_FILE = "dfs.provided.aliasmap.text.read.file";
+  public static final String DFS_PROVIDED_ALIASMAP_TEXT_READ_FILE_DEFAULT = "file:///tmp/blocks.csv";
 
   public static final String DFS_PROVIDED_ALIASMAP_TEXT_CODEC = "dfs.provided.aliasmap.text.codec";
-  public static final String DFS_PROVIDED_ALIASMAP_TEXT_WRITE_PATH = "dfs.provided.aliasmap.text.write.path";
+  public static final String DFS_PROVIDED_ALIASMAP_TEXT_WRITE_DIR = "dfs.provided.aliasmap.text.write.dir";
+  public static final String DFS_PROVIDED_ALIASMAP_TEXT_WRITE_DIR_DEFAULT = "file:///tmp/";
 
-  public static final String DFS_PROVIDED_ALIASMAP_LEVELDB_PATH = "dfs.provided.aliasmap.leveldb.read.path";
+  public static final String DFS_PROVIDED_ALIASMAP_LEVELDB_PATH = "dfs.provided.aliasmap.leveldb.path";
 
   public static final String  DFS_LIST_LIMIT = "dfs.ls.limit";
   public static final int     DFS_LIST_LIMIT_DEFAULT = 1000;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/AliasMapProtocolServerSideTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/AliasMapProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/AliasMapProtocolServerSideTranslatorPB.java
index 15a1124..8d89c40 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/AliasMapProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/AliasMapProtocolServerSideTranslatorPB.java
@@ -121,4 +121,14 @@ public class AliasMapProtocolServerSideTranslatorPB
       throw new ServiceException(e);
     }
   }
+
+  public BlockPoolResponseProto getBlockPoolId(RpcController controller,
+      BlockPoolRequestProto req) throws ServiceException {
+    try {
+      String bpid = aliasMap.getBlockPoolId();
+      return BlockPoolResponseProto.newBuilder().setBlockPoolId(bpid).build();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java
index 7f6cc91..fc23c88 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java
@@ -73,7 +73,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
           RPC.getProtocolVersion(AliasMapProtocolPB.class), aliasMapAddr, null,
           conf, NetUtils.getDefaultSocketFactory(conf), 0);
     } catch (IOException e) {
-      e.printStackTrace();
+      throw new RuntimeException(
+          "Error in connecting to " + addr + " Got: " + e);
     }
   }
 
@@ -93,8 +94,7 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
           .stream()
           .map(kv -> new FileRegion(
               PBHelperClient.convert(kv.getKey()),
-              PBHelperClient.convert(kv.getValue()),
-              null
+              PBHelperClient.convert(kv.getValue())
           ))
           .collect(Collectors.toList());
       BlockProto nextMarker = response.getNextMarker();
@@ -157,6 +157,17 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
     }
   }
 
+  @Override
+  public String getBlockPoolId() throws IOException {
+    try {
+      BlockPoolResponseProto response = rpcProxy.getBlockPoolId(null,
+          BlockPoolRequestProto.newBuilder().build());
+      return response.getBlockPoolId();
+    } catch (ServiceException e) {
+      throw ProtobufHelper.getRemoteException(e);
+    }
+  }
+
   public void stop() {
     RPC.stopProxy(rpcProxy);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index 2952a5b..ac01348 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
@@ -1122,6 +1122,6 @@ public class PBHelper {
     ProvidedStorageLocation providedStorageLocation =
         PBHelperClient.convert(providedStorageLocationProto);
 
-    return new FileRegion(block, providedStorageLocation, null);
+    return new FileRegion(block, providedStorageLocation);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java
index f7be111..3d9eeea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java
@@ -121,7 +121,7 @@ public class InMemoryAliasMap implements InMemoryAliasMapProtocol,
         Block block = fromBlockBytes(entry.getKey());
         ProvidedStorageLocation providedStorageLocation =
             fromProvidedStorageLocationBytes(entry.getValue());
-        batch.add(new FileRegion(block, providedStorageLocation, null));
+        batch.add(new FileRegion(block, providedStorageLocation));
         ++i;
       }
       if (iterator.hasNext()) {
@@ -157,6 +157,11 @@ public class InMemoryAliasMap implements InMemoryAliasMapProtocol,
     levelDb.put(extendedBlockDbFormat, providedStorageLocationDbFormat);
   }
 
+  @Override
+  public String getBlockPoolId() {
+    return null;
+  }
+
   public void close() throws IOException {
     levelDb.close();
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMapProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMapProtocol.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMapProtocol.java
index c89c6b6..89f590c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMapProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMapProtocol.java
@@ -93,4 +93,11 @@ public interface InMemoryAliasMapProtocol {
   void write(@Nonnull Block block,
       @Nonnull ProvidedStorageLocation providedStorageLocation)
       throws IOException;
+
+  /**
+   * Get the associated block pool id.
+   * @return the block pool id associated with the Namenode running
+   * the in-memory alias map.
+   */
+  String getBlockPoolId() throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java
index 197e663..a229ae7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java
@@ -55,11 +55,13 @@ public class InMemoryLevelDBAliasMapServer implements InMemoryAliasMapProtocol,
   private RPC.Server aliasMapServer;
   private Configuration conf;
   private InMemoryAliasMap aliasMap;
+  private String blockPoolId;
 
   public InMemoryLevelDBAliasMapServer(
-      CheckedFunction<Configuration, InMemoryAliasMap> initFun) {
+      CheckedFunction<Configuration, InMemoryAliasMap> initFun,
+      String blockPoolId) {
     this.initFun = initFun;
-
+    this.blockPoolId = blockPoolId;
   }
 
   public void start() throws IOException {
@@ -92,7 +94,7 @@ public class InMemoryLevelDBAliasMapServer implements InMemoryAliasMapProtocol,
         .setVerbose(true)
         .build();
 
-    LOG.info("Starting InMemoryLevelDBAliasMapServer on ", rpcAddress);
+    LOG.info("Starting InMemoryLevelDBAliasMapServer on {}", rpcAddress);
     aliasMapServer.start();
   }
 
@@ -117,6 +119,11 @@ public class InMemoryLevelDBAliasMapServer implements InMemoryAliasMapProtocol,
   }
 
   @Override
+  public String getBlockPoolId() {
+    return blockPoolId;
+  }
+
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
     try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ProvidedStorageMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ProvidedStorageMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ProvidedStorageMap.java
index f6b0c0b..7fbc71a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ProvidedStorageMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ProvidedStorageMap.java
@@ -152,8 +152,12 @@ public class ProvidedStorageMap {
       LOG.info("Calling process first blk report from storage: "
           + providedStorageInfo);
       // first pass; periodic refresh should call bm.processReport
-      bm.processFirstBlockReport(providedStorageInfo,
-          new ProvidedBlockList(aliasMap.getReader(null).iterator()));
+      BlockAliasMap.Reader<BlockAlias> reader =
+          aliasMap.getReader(null, bm.getBlockPoolId());
+      if (reader != null) {
+        bm.processFirstBlockReport(providedStorageInfo,
+                new ProvidedBlockList(reader.iterator()));
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/FileRegion.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/FileRegion.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/FileRegion.java
index c2697c8..e6f0d0a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/FileRegion.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/FileRegion.java
@@ -34,39 +34,21 @@ import org.apache.hadoop.hdfs.protocol.ProvidedStorageLocation;
 public class FileRegion implements BlockAlias {
 
   private final Pair<Block, ProvidedStorageLocation> pair;
-  private final String bpid;
 
   public FileRegion(long blockId, Path path, long offset,
-      long length, String bpid, long genStamp) {
+      long length, long genStamp) {
     this(new Block(blockId, length, genStamp),
-        new ProvidedStorageLocation(path, offset, length, new byte[0]), bpid);
+        new ProvidedStorageLocation(path, offset, length, new byte[0]));
   }
 
-  public FileRegion(long blockId, Path path, long offset,
-      long length, String bpid) {
-    this(blockId, path, offset, length, bpid,
+  public FileRegion(long blockId, Path path, long offset, long length) {
+    this(blockId, path, offset, length,
         HdfsConstants.GRANDFATHER_GENERATION_STAMP);
   }
 
-  public FileRegion(long blockId, Path path, long offset,
-      long length, long genStamp) {
-    this(blockId, path, offset, length, null, genStamp);
-  }
-
   public FileRegion(Block block,
       ProvidedStorageLocation providedStorageLocation) {
     this.pair  = Pair.of(block, providedStorageLocation);
-    this.bpid = null;
-  }
-
-  public FileRegion(Block block,
-      ProvidedStorageLocation providedStorageLocation, String bpid) {
-    this.pair  = Pair.of(block, providedStorageLocation);
-    this.bpid = bpid;
-  }
-
-  public FileRegion(long blockId, Path path, long offset, long length) {
-    this(blockId, path, offset, length, null);
   }
 
   public Block getBlock() {
@@ -77,10 +59,6 @@ public class FileRegion implements BlockAlias {
     return pair.getValue();
   }
 
-  public String getBlockPoolId() {
-    return this.bpid;
-  }
-
   @Override
   public boolean equals(Object o) {
     if (this == o) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/BlockAliasMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/BlockAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/BlockAliasMap.java
index 8233fbe..897aefd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/BlockAliasMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/BlockAliasMap.java
@@ -61,20 +61,22 @@ public abstract class BlockAliasMap<T extends BlockAlias> {
 
     /**
      * @param ident block to resolve
-     * @return BlockAlias correspoding to the provided block.
+     * @return BlockAlias corresponding to the provided block.
      * @throws IOException
      */
     public abstract Optional<U> resolve(Block ident) throws IOException;
-
   }
 
   /**
    * Returns a reader to the alias map.
    * @param opts reader options
-   * @return {@link Reader} to the alias map.
+   * @param blockPoolID block pool id to use
+   * @return {@link Reader} to the alias map. If a Reader for the blockPoolID
+   * cannot be created, this will return null.
    * @throws IOException
    */
-  public abstract Reader<T> getReader(Reader.Options opts) throws IOException;
+  public abstract Reader<T> getReader(Reader.Options opts, String blockPoolID)
+      throws IOException;
 
   /**
    * An abstract class used as a writer for the provided block map.
@@ -93,10 +95,12 @@ public abstract class BlockAliasMap<T extends BlockAlias> {
   /**
    * Returns the writer for the alias map.
    * @param opts writer options.
+   * @param blockPoolID block pool id to use
    * @return {@link Writer} to the alias map.
    * @throws IOException
    */
-  public abstract Writer<T> getWriter(Writer.Options opts) throws IOException;
+  public abstract Writer<T> getWriter(Writer.Options opts, String blockPoolID)
+      throws IOException;
 
   /**
    * Refresh the alias map.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/InMemoryLevelDBAliasMapClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/InMemoryLevelDBAliasMapClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/InMemoryLevelDBAliasMapClient.java
index bd6ebd1..d389184 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/InMemoryLevelDBAliasMapClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/InMemoryLevelDBAliasMapClient.java
@@ -46,6 +46,7 @@ public class InMemoryLevelDBAliasMapClient extends BlockAliasMap<FileRegion>
 
   private Configuration conf;
   private InMemoryAliasMapProtocolClientSideTranslatorPB aliasMap;
+  private String blockPoolID;
 
   @Override
   public void close() {
@@ -57,7 +58,7 @@ public class InMemoryLevelDBAliasMapClient extends BlockAliasMap<FileRegion>
     @Override
     public Optional<FileRegion> resolve(Block block) throws IOException {
       Optional<ProvidedStorageLocation> read = aliasMap.read(block);
-      return read.map(psl -> new FileRegion(block, psl, null));
+      return read.map(psl -> new FileRegion(block, psl));
     }
 
     @Override
@@ -133,12 +134,29 @@ public class InMemoryLevelDBAliasMapClient extends BlockAliasMap<FileRegion>
 
 
   @Override
-  public Reader<FileRegion> getReader(Reader.Options opts) throws IOException {
+  public Reader<FileRegion> getReader(Reader.Options opts, String blockPoolID)
+      throws IOException {
+    if (this.blockPoolID == null) {
+      this.blockPoolID = aliasMap.getBlockPoolId();
+    }
+    // if a block pool id has been supplied, and doesn't match the associated
+    // block pool id, return null.
+    if (blockPoolID != null && this.blockPoolID != null
+        && !this.blockPoolID.equals(blockPoolID)) {
+      return null;
+    }
     return new LevelDbReader();
   }
 
   @Override
-  public Writer<FileRegion> getWriter(Writer.Options opts) throws IOException {
+  public Writer<FileRegion> getWriter(Writer.Options opts, String blockPoolID)
+      throws IOException {
+    if (this.blockPoolID == null) {
+      this.blockPoolID = aliasMap.getBlockPoolId();
+    }
+    if (blockPoolID != null && !this.blockPoolID.equals(blockPoolID)) {
+      return null;
+    }
     return new LevelDbWriter();
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/LevelDBFileRegionAliasMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/LevelDBFileRegionAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/LevelDBFileRegionAliasMap.java
index 6a3d55c..adad79a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/LevelDBFileRegionAliasMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/LevelDBFileRegionAliasMap.java
@@ -70,7 +70,8 @@ public class LevelDBFileRegionAliasMap
   }
 
   @Override
-  public Reader<FileRegion> getReader(Reader.Options opts) throws IOException {
+  public Reader<FileRegion> getReader(Reader.Options opts, String blockPoolID)
+      throws IOException {
     if (null == opts) {
       opts = this.opts;
     }
@@ -79,11 +80,12 @@ public class LevelDBFileRegionAliasMap
     }
     LevelDBOptions o = (LevelDBOptions) opts;
     return new LevelDBFileRegionAliasMap.LevelDBReader(
-        createDB(o.levelDBPath, false));
+        createDB(o.levelDBPath, false, blockPoolID));
   }
 
   @Override
-  public Writer<FileRegion> getWriter(Writer.Options opts) throws IOException {
+  public Writer<FileRegion> getWriter(Writer.Options opts, String blockPoolID)
+      throws IOException {
     if (null == opts) {
       opts = this.opts;
     }
@@ -92,11 +94,11 @@ public class LevelDBFileRegionAliasMap
     }
     LevelDBOptions o = (LevelDBOptions) opts;
     return new LevelDBFileRegionAliasMap.LevelDBWriter(
-        createDB(o.levelDBPath, true));
+        createDB(o.levelDBPath, true, blockPoolID));
   }
 
-  private static DB createDB(String levelDBPath, boolean createIfMissing)
-      throws IOException {
+  private static DB createDB(String levelDBPath, boolean createIfMissing,
+      String blockPoolID) throws IOException {
     if (levelDBPath == null || levelDBPath.length() == 0) {
       throw new IllegalArgumentException(
           "A valid path needs to be specified for "
@@ -105,7 +107,13 @@ public class LevelDBFileRegionAliasMap
     }
     org.iq80.leveldb.Options options = new org.iq80.leveldb.Options();
     options.createIfMissing(createIfMissing);
-    return factory.open(new File(levelDBPath), options);
+    File dbFile;
+    if (blockPoolID != null) {
+      dbFile = new File(levelDBPath, blockPoolID);
+    } else {
+      dbFile = new File(levelDBPath);
+    }
+    return factory.open(dbFile, options);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
index 69d9cc1..878a208 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
@@ -82,7 +82,7 @@ public class TextFileRegionAliasMap
   }
 
   @Override
-  public Reader<FileRegion> getReader(Reader.Options opts)
+  public Reader<FileRegion> getReader(Reader.Options opts, String blockPoolID)
       throws IOException {
     if (null == opts) {
       opts = readerOpts;
@@ -94,23 +94,29 @@ public class TextFileRegionAliasMap
     Configuration readerConf = (null == o.getConf())
         ? new Configuration()
             : o.getConf();
-    return createReader(o.file, o.delim, readerConf);
+    return createReader(o.file, o.delim, readerConf, blockPoolID);
   }
 
   @VisibleForTesting
-  TextReader createReader(Path file, String delim, Configuration cfg)
-      throws IOException {
+  TextReader createReader(Path file, String delim, Configuration cfg,
+      String blockPoolID) throws IOException {
     FileSystem fs = file.getFileSystem(cfg);
     if (fs instanceof LocalFileSystem) {
       fs = ((LocalFileSystem)fs).getRaw();
     }
     CompressionCodecFactory factory = new CompressionCodecFactory(cfg);
     CompressionCodec codec = factory.getCodec(file);
-    return new TextReader(fs, file, codec, delim);
+    String filename = fileNameFromBlockPoolID(blockPoolID);
+    if (codec != null) {
+      filename = filename + codec.getDefaultExtension();
+    }
+    Path bpidFilePath = new Path(file.getParent(), filename);
+    return new TextReader(fs, bpidFilePath, codec, delim);
   }
 
   @Override
-  public Writer<FileRegion> getWriter(Writer.Options opts) throws IOException {
+  public Writer<FileRegion> getWriter(Writer.Options opts, String blockPoolID)
+      throws IOException {
     if (null == opts) {
       opts = writerOpts;
     }
@@ -121,14 +127,15 @@ public class TextFileRegionAliasMap
     Configuration cfg = (null == o.getConf())
         ? new Configuration()
             : o.getConf();
+    String baseName = fileNameFromBlockPoolID(blockPoolID);
+    Path blocksFile = new Path(o.dir, baseName);
     if (o.codec != null) {
       CompressionCodecFactory factory = new CompressionCodecFactory(cfg);
       CompressionCodec codec = factory.getCodecByName(o.codec);
-      String name = o.file.getName() + codec.getDefaultExtension();
-      o.filename(new Path(o.file.getParent(), name));
-      return createWriter(o.file, codec, o.delim, cfg);
+      blocksFile = new Path(o.dir, baseName + codec.getDefaultExtension());
+      return createWriter(blocksFile, codec, o.delim, cfg);
     }
-    return createWriter(o.file, null, o.delim, conf);
+    return createWriter(blocksFile, null, o.delim, conf);
   }
 
   @VisibleForTesting
@@ -154,15 +161,15 @@ public class TextFileRegionAliasMap
     private String delim =
         DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER_DEFAULT;
     private Path file = new Path(
-        new File(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_PATH_DEFAULT).toURI()
-            .toString());
+        new File(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_READ_FILE_DEFAULT)
+            .toURI().toString());
 
     @Override
     public void setConf(Configuration conf) {
       this.conf = conf;
       String tmpfile =
-          conf.get(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_READ_PATH,
-              DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_PATH_DEFAULT);
+          conf.get(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_READ_FILE,
+              DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_READ_FILE_DEFAULT);
       file = new Path(tmpfile);
       delim = conf.get(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER,
           DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER_DEFAULT);
@@ -195,17 +202,17 @@ public class TextFileRegionAliasMap
 
     private Configuration conf;
     private String codec = null;
-    private Path file =
-        new Path(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_PATH_DEFAULT);
+    private Path dir =
+        new Path(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_WRITE_DIR_DEFAULT);
     private String delim =
         DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER_DEFAULT;
 
     @Override
     public void setConf(Configuration conf) {
       this.conf = conf;
-      String tmpfile = conf.get(
-          DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_WRITE_PATH, file.toString());
-      file = new Path(tmpfile);
+      String tmpDir = conf.get(
+          DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_WRITE_DIR, dir.toString());
+      dir = new Path(tmpDir);
       codec = conf.get(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_CODEC);
       delim = conf.get(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER,
           DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER_DEFAULT);
@@ -217,8 +224,8 @@ public class TextFileRegionAliasMap
     }
 
     @Override
-    public WriterOptions filename(Path file) {
-      this.file = file;
+    public WriterOptions dirName(Path dir) {
+      this.dir = dir;
       return this;
     }
 
@@ -226,8 +233,8 @@ public class TextFileRegionAliasMap
       return codec;
     }
 
-    public Path getFile() {
-      return file;
+    public Path getDir() {
+      return dir;
     }
 
     @Override
@@ -267,6 +274,7 @@ public class TextFileRegionAliasMap
     private final FileSystem fs;
     private final CompressionCodec codec;
     private final Map<FRIterator, BufferedReader> iterators;
+    private final String blockPoolID;
 
     protected TextReader(FileSystem fs, Path file, CompressionCodec codec,
         String delim) {
@@ -281,6 +289,7 @@ public class TextFileRegionAliasMap
       this.codec = codec;
       this.delim = delim;
       this.iterators = Collections.synchronizedMap(iterators);
+      this.blockPoolID = blockPoolIDFromFileName(file);
     }
 
     @Override
@@ -344,12 +353,11 @@ public class TextFileRegionAliasMap
         return null;
       }
       String[] f = line.split(delim);
-      if (f.length != 6) {
+      if (f.length != 5) {
         throw new IOException("Invalid line: " + line);
       }
       return new FileRegion(Long.parseLong(f[0]), new Path(f[1]),
-          Long.parseLong(f[2]), Long.parseLong(f[3]), f[4],
-          Long.parseLong(f[5]));
+          Long.parseLong(f[2]), Long.parseLong(f[3]), Long.parseLong(f[4]));
     }
 
     public InputStream createStream() throws IOException {
@@ -409,7 +417,7 @@ public class TextFileRegionAliasMap
      */
     public interface Options extends Writer.Options {
       Options codec(String codec);
-      Options filename(Path file);
+      Options dirName(Path dir);
       Options delimiter(String delim);
     }
 
@@ -434,7 +442,6 @@ public class TextFileRegionAliasMap
       out.append(psl.getPath().toString()).append(delim);
       out.append(Long.toString(psl.getOffset())).append(delim);
       out.append(Long.toString(psl.getLength())).append(delim);
-      out.append(token.getBlockPoolId()).append(delim);
       out.append(Long.toString(block.getGenerationStamp())).append(delim);
       out.append("\n");
     }
@@ -457,4 +464,17 @@ public class TextFileRegionAliasMap
     //nothing to do;
   }
 
+  @VisibleForTesting
+  public static String blockPoolIDFromFileName(Path file) {
+    if (file == null) {
+      return "";
+    }
+    String fileName = file.getName();
+    return fileName.substring("blocks_".length()).split("\\.")[0];
+  }
+
+  @VisibleForTesting
+  public static String fileNameFromBlockPoolID(String blockPoolID) {
+    return "blocks_" + blockPoolID + ".csv";
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
index 445b455..bab788b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
@@ -63,6 +63,8 @@ import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_PROVIDED_ALIASMAP_LOAD_RETRIES;
+
 /**
  * This class is used to create provided volumes.
  */
@@ -125,6 +127,7 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
     private ReplicaMap bpVolumeMap;
     private ProvidedVolumeDF df;
     private AtomicLong numOfBlocks = new AtomicLong();
+    private int numRetries;
 
     ProvidedBlockPoolSlice(String bpid, ProvidedVolumeImpl volume,
         Configuration conf) {
@@ -138,6 +141,7 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
       this.bpid = bpid;
       this.df = new ProvidedVolumeDF();
       bpVolumeMap.initBlockPool(bpid);
+      this.numRetries = conf.getInt(DFS_PROVIDED_ALIASMAP_LOAD_RETRIES, 0);
       LOG.info("Created alias map using class: " + aliasMap.getClass());
     }
 
@@ -153,18 +157,27 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
     void fetchVolumeMap(ReplicaMap volumeMap,
         RamDiskReplicaTracker ramDiskReplicaMap, FileSystem remoteFS)
         throws IOException {
-      BlockAliasMap.Reader<FileRegion> reader = aliasMap.getReader(null);
+      BlockAliasMap.Reader<FileRegion> reader = null;
+      int tries = 1;
+      do {
+        try {
+          reader = aliasMap.getReader(null, bpid);
+          break;
+        } catch (IOException e) {
+          tries++;
+          reader = null;
+        }
+      } while (tries <= numRetries);
+
       if (reader == null) {
-        LOG.warn("Got null reader from BlockAliasMap " + aliasMap
+        LOG.error("Got null reader from BlockAliasMap " + aliasMap
             + "; no blocks will be populated");
         return;
       }
       Path blockPrefixPath = new Path(providedVolume.getBaseURI());
       for (FileRegion region : reader) {
-        if (region.getBlockPoolId() != null
-            && region.getBlockPoolId().equals(bpid)
-            && containsBlock(providedVolume.baseURI,
-                region.getProvidedStorageLocation().getPath().toUri())) {
+        if (containsBlock(providedVolume.baseURI,
+            region.getProvidedStorageLocation().getPath().toUri())) {
           String blockSuffix = getSuffix(blockPrefixPath,
               new Path(region.getProvidedStorageLocation().getPath().toUri()));
           ReplicaInfo newReplica = new ReplicaBuilder(ReplicaState.FINALIZED)
@@ -215,14 +228,12 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
        * the ids remain the same.
        */
       aliasMap.refresh();
-      BlockAliasMap.Reader<FileRegion> reader = aliasMap.getReader(null);
+      BlockAliasMap.Reader<FileRegion> reader = aliasMap.getReader(null, bpid);
       for (FileRegion region : reader) {
         reportCompiler.throttle();
-        if (region.getBlockPoolId().equals(bpid)) {
-          report.add(new ScanInfo(region.getBlock().getBlockId(),
-              providedVolume, region,
-              region.getProvidedStorageLocation().getLength()));
-        }
+        report.add(new ScanInfo(region.getBlock().getBlockId(),
+            providedVolume, region,
+            region.getProvidedStorageLocation().getLength()));
       }
     }
 
@@ -415,9 +426,7 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
         if (temp.getBlock().getBlockId() < state.lastBlockId) {
           continue;
         }
-        if (temp.getBlockPoolId().equals(bpid)) {
-          nextRegion = temp;
-        }
+        nextRegion = temp;
       }
       if (null == nextRegion) {
         return null;
@@ -435,7 +444,7 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
     public void rewind() {
       BlockAliasMap.Reader<FileRegion> reader = null;
       try {
-        reader = blockAliasMap.getReader(null);
+        reader = blockAliasMap.getReader(null, bpid);
       } catch (IOException e) {
         LOG.warn("Exception in getting reader from provided alias map");
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
index 993716a..a6f5790 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
@@ -729,6 +729,7 @@ public class NameNode extends ReconfigurableBase implements
     }
 
     loadNamesystem(conf);
+    startAliasMapServerIfNecessary(conf);
 
     rpcServer = createRpcServer(conf);
 
@@ -749,7 +750,6 @@ public class NameNode extends ReconfigurableBase implements
 
     startCommonServices(conf);
     startMetricsLogger(conf);
-    startAliasMapServerIfNecessary(conf);
   }
 
   private void startAliasMapServerIfNecessary(Configuration conf)
@@ -758,8 +758,8 @@ public class NameNode extends ReconfigurableBase implements
         DFSConfigKeys.DFS_NAMENODE_PROVIDED_ENABLED_DEFAULT)
         && conf.getBoolean(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_ENABLED,
             DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_ENABLED_DEFAULT)) {
-      levelDBAliasMapServer =
-          new InMemoryLevelDBAliasMapServer(InMemoryAliasMap::init);
+      levelDBAliasMapServer = new InMemoryLevelDBAliasMapServer(
+          InMemoryAliasMap::init, namesystem.getBlockPoolId());
       levelDBAliasMapServer.setConf(conf);
       levelDBAliasMapServer.start();
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
index 433d9b7..10650da 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
@@ -164,6 +164,10 @@ public class NamespaceInfo extends StorageInfo {
     this.clusterID = clusterID;
   }
 
+  public void setBlockPoolID(String blockPoolID) {
+    this.blockPoolID = blockPoolID;
+  }
+
   @Override
   public String toString(){
     return super.toString() + ";bpid=" + blockPoolID;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto
index 08f10bb..01dd952 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto
@@ -53,8 +53,16 @@ message ListResponseProto {
   optional BlockProto nextMarker = 2;
 }
 
+message BlockPoolRequestProto {
+}
+
+message BlockPoolResponseProto {
+  required string blockPoolId = 1;
+}
+
 service AliasMapProtocolService {
   rpc write(WriteRequestProto) returns(WriteResponseProto);
   rpc read(ReadRequestProto) returns(ReadResponseProto);
   rpc list(ListRequestProto) returns(ListResponseProto);
+  rpc getBlockPoolId(BlockPoolRequestProto) returns(BlockPoolResponseProto);
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
index ddc07ac..3dc583c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
@@ -4696,7 +4696,7 @@
   </property>
 
   <property>
-    <name>dfs.provided.aliasmap.text.read.path</name>
+    <name>dfs.provided.aliasmap.text.read.file</name>
     <value></value>
     <description>
         The path specifying the provided block map as a text file, specified as
@@ -4713,7 +4713,7 @@
   </property>
 
   <property>
-    <name>dfs.provided.aliasmap.text.write.path</name>
+    <name>dfs.provided.aliasmap.text.write.dir</name>
     <value></value>
     <description>
         The path to which the provided block map should be written as a text
@@ -4722,6 +4722,25 @@
   </property>
 
   <property>
+    <name>dfs.provided.aliasmap.leveldb.path</name>
+    <value></value>
+    <description>
+      The read/write path for the leveldb-based alias map
+      (org.apache.hadoop.hdfs.server.common.blockaliasmap.impl.LevelDBFileRegionAliasMap).
+      The path must be explicitly configured when this alias map is used.
+    </description>
+  </property>
+
+  <property>
+    <name>dfs.provided.aliasmap.load.retries</name>
+    <value>0</value>
+    <description>
+      The number of times the Datanode retries loading the provided
+      alias map before giving up; defaults to 0 (no retries).
+    </description>
+  </property>
+
+  <property>
     <name>dfs.lock.suppress.warning.interval</name>
     <value>10s</value>
     <description>Instrumentation reporting long critical sections will suppress

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestProvidedStorageMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestProvidedStorageMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestProvidedStorageMap.java
index faf1f83..b419c38 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestProvidedStorageMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestProvidedStorageMap.java
@@ -44,6 +44,7 @@ public class TestProvidedStorageMap {
   private BlockManager bm;
   private RwLock nameSystemLock;
   private String providedStorageID;
+  private String blockPoolID;
 
   @Before
   public void setup() {
@@ -55,8 +56,9 @@ public class TestProvidedStorageMap {
     conf.setClass(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_CLASS,
         TestProvidedImpl.TestFileRegionBlockAliasMap.class,
         BlockAliasMap.class);
-
+    blockPoolID = "BP-12344-10.1.1.2-12344";
     bm = mock(BlockManager.class);
+    when(bm.getBlockPoolId()).thenReturn(blockPoolID);
     nameSystemLock = mock(RwLock.class);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestInMemoryLevelDBAliasMapClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestInMemoryLevelDBAliasMapClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestInMemoryLevelDBAliasMapClient.java
index 4a9661b..a388199 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestInMemoryLevelDBAliasMapClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestInMemoryLevelDBAliasMapClient.java
@@ -56,11 +56,10 @@ public class TestInMemoryLevelDBAliasMapClient {
   private InMemoryLevelDBAliasMapClient inMemoryLevelDBAliasMapClient;
   private File tempDir;
   private Configuration conf;
+  private final static String BPID = "BPID-0";
 
   @Before
   public void setUp() throws IOException {
-    levelDBAliasMapServer =
-        new InMemoryLevelDBAliasMapServer(InMemoryAliasMap::init);
     conf = new Configuration();
     int port = 9876;
 
@@ -69,6 +68,8 @@ public class TestInMemoryLevelDBAliasMapClient {
     tempDir = Files.createTempDir();
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_LEVELDB_DIR,
         tempDir.getAbsolutePath());
+    levelDBAliasMapServer =
+        new InMemoryLevelDBAliasMapServer(InMemoryAliasMap::init, BPID);
     inMemoryLevelDBAliasMapClient = new InMemoryLevelDBAliasMapClient();
   }
 
@@ -81,20 +82,20 @@ public class TestInMemoryLevelDBAliasMapClient {
 
   @Test
   public void writeRead() throws Exception {
-    inMemoryLevelDBAliasMapClient.setConf(conf);
     levelDBAliasMapServer.setConf(conf);
     levelDBAliasMapServer.start();
+    inMemoryLevelDBAliasMapClient.setConf(conf);
     Block block = new Block(42, 43, 44);
     byte[] nonce = "blackbird".getBytes();
     ProvidedStorageLocation providedStorageLocation
         = new ProvidedStorageLocation(new Path("cuckoo"),
         45, 46, nonce);
     BlockAliasMap.Writer<FileRegion> writer =
-        inMemoryLevelDBAliasMapClient.getWriter(null);
+        inMemoryLevelDBAliasMapClient.getWriter(null, BPID);
     writer.store(new FileRegion(block, providedStorageLocation));
 
     BlockAliasMap.Reader<FileRegion> reader =
-        inMemoryLevelDBAliasMapClient.getReader(null);
+        inMemoryLevelDBAliasMapClient.getReader(null, BPID);
     Optional<FileRegion> fileRegion = reader.resolve(block);
     assertEquals(new FileRegion(block, providedStorageLocation),
         fileRegion.get());
@@ -102,9 +103,9 @@ public class TestInMemoryLevelDBAliasMapClient {
 
   @Test
   public void iterateSingleBatch() throws Exception {
-    inMemoryLevelDBAliasMapClient.setConf(conf);
     levelDBAliasMapServer.setConf(conf);
     levelDBAliasMapServer.start();
+    inMemoryLevelDBAliasMapClient.setConf(conf);
     Block block1 = new Block(42, 43, 44);
     Block block2 = new Block(43, 44, 45);
     byte[] nonce1 = "blackbird".getBytes();
@@ -116,14 +117,14 @@ public class TestInMemoryLevelDBAliasMapClient {
         new ProvidedStorageLocation(new Path("falcon"),
             46, 47, nonce2);
     BlockAliasMap.Writer<FileRegion> writer1 =
-        inMemoryLevelDBAliasMapClient.getWriter(null);
+        inMemoryLevelDBAliasMapClient.getWriter(null, BPID);
     writer1.store(new FileRegion(block1, providedStorageLocation1));
     BlockAliasMap.Writer<FileRegion> writer2 =
-        inMemoryLevelDBAliasMapClient.getWriter(null);
+        inMemoryLevelDBAliasMapClient.getWriter(null, BPID);
     writer2.store(new FileRegion(block2, providedStorageLocation2));
 
     BlockAliasMap.Reader<FileRegion> reader =
-        inMemoryLevelDBAliasMapClient.getReader(null);
+        inMemoryLevelDBAliasMapClient.getReader(null, BPID);
     List<FileRegion> actualFileRegions =
         Lists.newArrayListWithCapacity(2);
     for (FileRegion fileRegion : reader) {
@@ -140,8 +141,8 @@ public class TestInMemoryLevelDBAliasMapClient {
   public void iterateThreeBatches() throws Exception {
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_BATCH_SIZE, "2");
     levelDBAliasMapServer.setConf(conf);
-    inMemoryLevelDBAliasMapClient.setConf(conf);
     levelDBAliasMapServer.start();
+    inMemoryLevelDBAliasMapClient.setConf(conf);
     Block block1 = new Block(42, 43, 44);
     Block block2 = new Block(43, 44, 45);
     Block block3 = new Block(44, 45, 46);
@@ -173,26 +174,26 @@ public class TestInMemoryLevelDBAliasMapClient {
         new ProvidedStorageLocation(new Path("duck"),
             56, 57, nonce6);
     inMemoryLevelDBAliasMapClient
-        .getWriter(null)
+        .getWriter(null, BPID)
         .store(new FileRegion(block1, providedStorageLocation1));
     inMemoryLevelDBAliasMapClient
-        .getWriter(null)
+        .getWriter(null, BPID)
         .store(new FileRegion(block2, providedStorageLocation2));
     inMemoryLevelDBAliasMapClient
-        .getWriter(null)
+        .getWriter(null, BPID)
         .store(new FileRegion(block3, providedStorageLocation3));
     inMemoryLevelDBAliasMapClient
-        .getWriter(null)
+        .getWriter(null, BPID)
         .store(new FileRegion(block4, providedStorageLocation4));
     inMemoryLevelDBAliasMapClient
-        .getWriter(null)
+        .getWriter(null, BPID)
         .store(new FileRegion(block5, providedStorageLocation5));
     inMemoryLevelDBAliasMapClient
-        .getWriter(null)
+        .getWriter(null, BPID)
         .store(new FileRegion(block6, providedStorageLocation6));
 
     BlockAliasMap.Reader<FileRegion> reader =
-        inMemoryLevelDBAliasMapClient.getReader(null);
+        inMemoryLevelDBAliasMapClient.getReader(null, BPID);
     List<FileRegion> actualFileRegions =
         Lists.newArrayListWithCapacity(6);
     for (FileRegion fileRegion : reader) {
@@ -278,9 +279,9 @@ public class TestInMemoryLevelDBAliasMapClient {
 
   @Test
   public void multipleReads() throws IOException {
-    inMemoryLevelDBAliasMapClient.setConf(conf);
     levelDBAliasMapServer.setConf(conf);
     levelDBAliasMapServer.start();
+    inMemoryLevelDBAliasMapClient.setConf(conf);
 
     Random r = new Random();
     List<FileRegion> expectedFileRegions = r.ints(0, 200)
@@ -291,9 +292,9 @@ public class TestInMemoryLevelDBAliasMapClient {
 
 
     BlockAliasMap.Reader<FileRegion> reader =
-        inMemoryLevelDBAliasMapClient.getReader(null);
+        inMemoryLevelDBAliasMapClient.getReader(null, BPID);
     BlockAliasMap.Writer<FileRegion> writer =
-        inMemoryLevelDBAliasMapClient.getWriter(null);
+        inMemoryLevelDBAliasMapClient.getWriter(null, BPID);
 
     ExecutorService executor = Executors.newCachedThreadPool();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDBFileRegionAliasMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDBFileRegionAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDBFileRegionAliasMap.java
index 21199e1..a3c13e9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDBFileRegionAliasMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDBFileRegionAliasMap.java
@@ -35,6 +35,8 @@ import static org.junit.Assert.assertNotEquals;
  */
 public class TestLevelDBFileRegionAliasMap {
 
+  private static final String BPID = "BPID-0";
+
   /**
    * A basic test to verify that we can write data and read it back again.
    * @throws Exception
@@ -48,13 +50,13 @@ public class TestLevelDBFileRegionAliasMap {
       LevelDBFileRegionAliasMap.LevelDBOptions opts =
           new LevelDBFileRegionAliasMap.LevelDBOptions()
               .filename(dbFile.getAbsolutePath());
-      BlockAliasMap.Writer<FileRegion> writer = frf.getWriter(opts);
+      BlockAliasMap.Writer<FileRegion> writer = frf.getWriter(opts, BPID);
 
       FileRegion fr = new FileRegion(1, new Path("/file"), 1, 1, 1);
       writer.store(fr);
       writer.close();
 
-      BlockAliasMap.Reader<FileRegion> reader = frf.getReader(opts);
+      BlockAliasMap.Reader<FileRegion> reader = frf.getReader(opts, BPID);
       FileRegion fr2 = reader.resolve(new Block(1, 1, 1)).get();
       assertEquals(fr, fr2);
       reader.close();
@@ -86,14 +88,14 @@ public class TestLevelDBFileRegionAliasMap {
       LevelDBFileRegionAliasMap.LevelDBOptions opts =
           new LevelDBFileRegionAliasMap.LevelDBOptions()
               .filename(dbFile.getAbsolutePath());
-      BlockAliasMap.Writer<FileRegion> writer = frf.getWriter(opts);
+      BlockAliasMap.Writer<FileRegion> writer = frf.getWriter(opts, BPID);
 
       for (FileRegion fr : regions) {
         writer.store(fr);
       }
       writer.close();
 
-      BlockAliasMap.Reader<FileRegion> reader = frf.getReader(opts);
+      BlockAliasMap.Reader<FileRegion> reader = frf.getReader(opts, BPID);
       Iterator<FileRegion> it = reader.iterator();
       int last = -1;
       int count = 0;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDbMockAliasMapClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDbMockAliasMapClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDbMockAliasMapClient.java
index 43fc68c..8212b28 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDbMockAliasMapClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestLevelDbMockAliasMapClient.java
@@ -36,6 +36,7 @@ import java.io.IOException;
 import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * Tests the in-memory alias map with a mock level-db implementation.
@@ -46,12 +47,14 @@ public class TestLevelDbMockAliasMapClient {
   private File tempDir;
   private Configuration conf;
   private InMemoryAliasMap aliasMapMock;
+  private final String bpid = "BPID-0";
 
   @Before
   public void setUp() throws IOException {
     aliasMapMock = mock(InMemoryAliasMap.class);
+    when(aliasMapMock.getBlockPoolId()).thenReturn(bpid);
     levelDBAliasMapServer = new InMemoryLevelDBAliasMapServer(
-        config -> aliasMapMock);
+        config -> aliasMapMock, bpid);
     conf = new Configuration();
     int port = 9877;
 
@@ -60,10 +63,10 @@ public class TestLevelDbMockAliasMapClient {
     tempDir = Files.createTempDir();
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_LEVELDB_DIR,
         tempDir.getAbsolutePath());
-    inMemoryLevelDBAliasMapClient = new InMemoryLevelDBAliasMapClient();
-    inMemoryLevelDBAliasMapClient.setConf(conf);
     levelDBAliasMapServer.setConf(conf);
     levelDBAliasMapServer.start();
+    inMemoryLevelDBAliasMapClient = new InMemoryLevelDBAliasMapClient();
+    inMemoryLevelDBAliasMapClient.setConf(conf);
   }
 
   @After
@@ -83,11 +86,13 @@ public class TestLevelDbMockAliasMapClient {
 
     assertThatExceptionOfType(IOException.class)
         .isThrownBy(() ->
-            inMemoryLevelDBAliasMapClient.getReader(null).resolve(block));
+            inMemoryLevelDBAliasMapClient.getReader(null, bpid)
+                .resolve(block));
 
     assertThatExceptionOfType(IOException.class)
         .isThrownBy(() ->
-            inMemoryLevelDBAliasMapClient.getReader(null).resolve(block));
+            inMemoryLevelDBAliasMapClient.getReader(null, bpid)
+                .resolve(block));
   }
 
   @Test
@@ -104,12 +109,12 @@ public class TestLevelDbMockAliasMapClient {
 
     assertThatExceptionOfType(IOException.class)
         .isThrownBy(() ->
-            inMemoryLevelDBAliasMapClient.getWriter(null)
+            inMemoryLevelDBAliasMapClient.getWriter(null, bpid)
                 .store(new FileRegion(block, providedStorageLocation)));
 
     assertThatExceptionOfType(IOException.class)
         .isThrownBy(() ->
-            inMemoryLevelDBAliasMapClient.getWriter(null)
+            inMemoryLevelDBAliasMapClient.getWriter(null, bpid)
                 .store(new FileRegion(block, providedStorageLocation)));
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestTextBlockAliasMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestTextBlockAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestTextBlockAliasMap.java
index 79308a3..29c53e7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestTextBlockAliasMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TestTextBlockAliasMap.java
@@ -31,7 +31,10 @@ import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.compress.CompressionCodec;
 
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.junit.Test;
+
+import static org.apache.hadoop.hdfs.server.common.blockaliasmap.impl.TextFileRegionAliasMap.fileNameFromBlockPoolID;
 import static org.junit.Assert.*;
 
 /**
@@ -39,7 +42,10 @@ import static org.junit.Assert.*;
  */
 public class TestTextBlockAliasMap {
 
-  static final Path OUTFILE = new Path("hdfs://dummyServer:0000/dummyFile.txt");
+  static final String OUTFILE_PATH = "hdfs://dummyServer:0000/";
+  static final String OUTFILE_BASENAME = "dummyFile";
+  static final Path OUTFILE = new Path(OUTFILE_PATH, OUTFILE_BASENAME + "txt");
+  static final String BPID = "BPID-0";
 
   void check(TextWriter.Options opts, final Path vp,
       final Class<? extends CompressionCodec> vc) throws IOException {
@@ -56,7 +62,25 @@ public class TestTextBlockAliasMap {
         return null; // ignored
       }
     };
-    mFmt.getWriter(opts);
+    mFmt.getWriter(opts, BPID);
+  }
+
+  void check(TextReader.Options opts, final Path vp,
+      final Class<? extends CompressionCodec> vc) throws IOException {
+    TextFileRegionAliasMap aliasMap = new TextFileRegionAliasMap() {
+      @Override
+      public TextReader createReader(Path file, String delim, Configuration cfg,
+          String blockPoolID) throws IOException {
+        assertEquals(vp, file);
+        if (null != vc) {
+          CompressionCodecFactory factory = new CompressionCodecFactory(cfg);
+          CompressionCodec codec = factory.getCodec(file);
+          assertEquals(vc, codec.getClass());
+        }
+        return null; // ignored
+      }
+    };
+    aliasMap.getReader(opts, BPID);
   }
 
   @Test
@@ -64,18 +88,33 @@ public class TestTextBlockAliasMap {
     TextWriter.Options opts = TextWriter.defaults();
     assertTrue(opts instanceof WriterOptions);
     WriterOptions wopts = (WriterOptions) opts;
-    Path def = new Path(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_PATH_DEFAULT);
-    assertEquals(def, wopts.getFile());
+    Path def =
+        new Path(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_WRITE_DIR_DEFAULT);
+    assertEquals(def, wopts.getDir());
     assertNull(wopts.getCodec());
 
-    opts.filename(OUTFILE);
-    check(opts, OUTFILE, null);
+    Path cp = new Path(OUTFILE_PATH, "blocks_" + BPID + ".csv");
+    opts.dirName(new Path(OUTFILE_PATH));
+    check(opts, cp, null);
 
-    opts.filename(OUTFILE);
     opts.codec("gzip");
-    Path cp = new Path(OUTFILE.getParent(), OUTFILE.getName() + ".gz");
+    cp = new Path(OUTFILE_PATH, "blocks_" + BPID + ".csv.gz");
     check(opts, cp, org.apache.hadoop.io.compress.GzipCodec.class);
+  }
 
+  @Test
+  public void testReaderOptions() throws Exception {
+    TextReader.Options opts = TextReader.defaults();
+    assertTrue(opts instanceof ReaderOptions);
+    ReaderOptions ropts = (ReaderOptions) opts;
+
+    Path cp = new Path(OUTFILE_PATH, fileNameFromBlockPoolID(BPID));
+    opts.filename(cp);
+    check(opts, cp, null);
+
+    cp = new Path(OUTFILE_PATH, "blocks_" + BPID + ".csv.gz");
+    opts.filename(cp);
+    check(opts, cp, org.apache.hadoop.io.compress.GzipCodec.class);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
index 8bdbaa4..1a89f76 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
@@ -150,7 +150,7 @@ public class TestProvidedImpl {
           }
         }
         region = new FileRegion(currentCount, new Path(newFile.toString()),
-            0, BLK_LEN, BLOCK_POOL_IDS[CHOSEN_BP_ID]);
+            0, BLK_LEN);
         currentCount++;
       }
       return region;
@@ -194,9 +194,12 @@ public class TestProvidedImpl {
     }
 
     @Override
-    public Reader<FileRegion> getReader(Reader.Options opts)
+    public Reader<FileRegion> getReader(Reader.Options opts, String blockPoolId)
         throws IOException {
 
+      if (!blockPoolId.equals(BLOCK_POOL_IDS[CHOSEN_BP_ID])) {
+        return null;
+      }
       BlockAliasMap.Reader<FileRegion> reader =
           new BlockAliasMap.Reader<FileRegion>() {
             @Override
@@ -224,7 +227,7 @@ public class TestProvidedImpl {
     }
 
     @Override
-    public Writer<FileRegion> getWriter(Writer.Options opts)
+    public Writer<FileRegion> getWriter(Writer.Options opts, String blockPoolId)
         throws IOException {
       // not implemented
       return null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java
index 4598e9c..80bbaf9 100644
--- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java
+++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileSystemImage.java
@@ -73,6 +73,7 @@ public class FileSystemImage implements Tool {
     options.addOption("i", "blockidclass", true, "Block resolver class");
     options.addOption("c", "cachedirs", true, "Max active dirents");
     options.addOption("cid", "clusterID", true, "Cluster ID");
+    options.addOption("bpid", "blockPoolID", true, "Block Pool ID");
     options.addOption("h", "help", false, "Print usage");
     return options;
   }
@@ -120,6 +121,9 @@ public class FileSystemImage implements Tool {
       case "cid":
         opts.clusterID(o.getValue());
         break;
+      case "bpid":
+        opts.blockPoolID(o.getValue());
+        break;
       default:
         throw new UnsupportedOperationException("Internal error");
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
index 370c683..282429a 100644
--- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
+++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
@@ -134,6 +134,11 @@ public class ImageWriter implements Closeable {
         if (opts.clusterID.length() > 0) {
           info.setClusterID(opts.clusterID);
         }
+        // if block pool id is given
+        if (opts.blockPoolID.length() > 0) {
+          info.setBlockPoolID(opts.blockPoolID);
+        }
+
         stor.format(info);
         blockPoolID = info.getBlockPoolID();
       }
@@ -165,7 +170,7 @@ public class ImageWriter implements Closeable {
     BlockAliasMap<FileRegion> fmt = null == opts.blocks
         ? ReflectionUtils.newInstance(opts.aliasMap, opts.getConf())
         : opts.blocks;
-    blocks = fmt.getWriter(null);
+    blocks = fmt.getWriter(null, blockPoolID);
     blockIds = null == opts.blockIds
         ? ReflectionUtils.newInstance(opts.blockIdsClass, opts.getConf())
         : opts.blockIds;
@@ -525,6 +530,7 @@ public class ImageWriter implements Closeable {
     private Class<? extends UGIResolver> ugisClass;
     private BlockAliasMap<FileRegion> blocks;
     private String clusterID;
+    private String blockPoolID;
 
     @SuppressWarnings("rawtypes")
     private Class<? extends BlockAliasMap> aliasMap;
@@ -552,6 +558,7 @@ public class ImageWriter implements Closeable {
       blockIdsClass = conf.getClass(BLOCK_RESOLVER_CLASS,
           FixedBlockResolver.class, BlockResolver.class);
       clusterID = "";
+      blockPoolID = "";
     }
 
     @Override
@@ -614,6 +621,11 @@ public class ImageWriter implements Closeable {
       this.clusterID = clusterID;
       return this;
     }
+
+    public Options blockPoolID(String blockPoolID) {
+      this.blockPoolID = blockPoolID;
+      return this;
+    }
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/NullBlockAliasMap.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/NullBlockAliasMap.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/NullBlockAliasMap.java
index fcaaf44..41e202d 100644
--- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/NullBlockAliasMap.java
+++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/NullBlockAliasMap.java
@@ -36,7 +36,8 @@ import org.apache.hadoop.hdfs.server.common.blockaliasmap.BlockAliasMap;
 public class NullBlockAliasMap extends BlockAliasMap<FileRegion> {
 
   @Override
-  public Reader<FileRegion> getReader(Reader.Options opts) throws IOException {
+  public Reader<FileRegion> getReader(Reader.Options opts, String blockPoolID)
+      throws IOException {
     return new Reader<FileRegion>() {
       @Override
       public Iterator<FileRegion> iterator() {
@@ -69,7 +70,8 @@ public class NullBlockAliasMap extends BlockAliasMap<FileRegion> {
   }
 
   @Override
-  public Writer getWriter(Writer.Options opts) throws IOException {
+  public Writer getWriter(Writer.Options opts, String blockPoolID)
+      throws IOException {
     return new Writer<FileRegion>() {
       @Override
       public void store(FileRegion token) throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/TreePath.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/TreePath.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/TreePath.java
index 8f1382a..aca1220 100644
--- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/TreePath.java
+++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/TreePath.java
@@ -107,8 +107,7 @@ public class TreePath {
       long genStamp, String blockPoolID,
       BlockAliasMap.Writer<FileRegion> out) throws IOException {
     FileStatus s = getFileStatus();
-    out.store(new FileRegion(blockId, s.getPath(), offset, length,
-        blockPoolID, genStamp));
+    out.store(new FileRegion(blockId, s.getPath(), offset, length, genStamp));
   }
 
   INode toFile(UGIResolver ugi, BlockResolver blk,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52f56248/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java b/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java
index 70e4c33..deaf9d5 100644
--- a/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java
+++ b/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeProvidedImplementation.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
-import org.apache.hadoop.hdfs.BlockMissingException;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
@@ -79,6 +78,7 @@ import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hadoop.hdfs.server.common.blockaliasmap.impl.TextFileRegionAliasMap.fileNameFromBlockPoolID;
 import static org.apache.hadoop.net.NodeBase.PATH_SEPARATOR_STR;
 import static org.junit.Assert.*;
 
@@ -93,7 +93,6 @@ public class TestNameNodeProvidedImplementation {
   final Path BASE = new Path(fBASE.toURI().toString());
   final Path NAMEPATH = new Path(BASE, "providedDir");
   final Path NNDIRPATH = new Path(BASE, "nnDir");
-  final Path BLOCKFILE = new Path(NNDIRPATH, "blocks.csv");
   final String SINGLEUSER = "usr1";
   final String SINGLEGROUP = "grp1";
   private final int numFiles = 10;
@@ -101,6 +100,7 @@ public class TestNameNodeProvidedImplementation {
   private final String fileSuffix = ".dat";
   private final int baseFileLen = 1024;
   private long providedDataSize = 0;
+  private final String bpid = "BP-1234-10.1.1.1-1224";
 
   Configuration conf;
   MiniDFSCluster cluster;
@@ -123,10 +123,10 @@ public class TestNameNodeProvidedImplementation {
 
     conf.setClass(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_CLASS,
         TextFileRegionAliasMap.class, BlockAliasMap.class);
-    conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_WRITE_PATH,
-        BLOCKFILE.toString());
-    conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_READ_PATH,
-        BLOCKFILE.toString());
+    conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_WRITE_DIR,
+        NNDIRPATH.toString());
+    conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_READ_FILE,
+        new Path(NNDIRPATH, fileNameFromBlockPoolID(bpid)).toString());
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_TEXT_DELIMITER, ",");
 
     conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR_PROVIDED,
@@ -189,14 +189,14 @@ public class TestNameNodeProvidedImplementation {
     opts.output(out.toString())
         .blocks(aliasMapClass)
         .blockIds(blockIdsClass)
-        .clusterID(clusterID);
+        .clusterID(clusterID)
+        .blockPoolID(bpid);
     try (ImageWriter w = new ImageWriter(opts)) {
       for (TreePath e : t) {
         w.accept(e);
       }
     }
   }
-
   void startCluster(Path nspath, int numDatanodes,
       StorageType[] storageTypes,
       StorageType[][] storageTypesPerDatanode,
@@ -743,9 +743,7 @@ public class TestNameNodeProvidedImplementation {
   }
 
 
-  // This test will fail until there is a refactoring of the FileRegion
-  // (HDFS-12713).
-  @Test(expected=BlockMissingException.class)
+  @Test
   public void testInMemoryAliasMap() throws Exception {
     conf.setClass(ImageWriter.Options.UGI_CLASS,
         FsUGIResolver.class, UGIResolver.class);
@@ -758,9 +756,9 @@ public class TestNameNodeProvidedImplementation {
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_LEVELDB_DIR,
         tempDirectory.getAbsolutePath());
     conf.setBoolean(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_ENABLED, true);
-
+    conf.setInt(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_LOAD_RETRIES, 10);
     InMemoryLevelDBAliasMapServer levelDBAliasMapServer =
-        new InMemoryLevelDBAliasMapServer(InMemoryAliasMap::init);
+        new InMemoryLevelDBAliasMapServer(InMemoryAliasMap::init, bpid);
     levelDBAliasMapServer.setConf(conf);
     levelDBAliasMapServer.start();
 


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org