Posted to common-commits@hadoop.apache.org by kk...@apache.org on 2017/12/19 00:09:14 UTC
[23/50] [abbrv] hadoop git commit: HDFS-12685. [READ] FsVolumeImpl exception when scanning Provided storage volume
HDFS-12685. [READ] FsVolumeImpl exception when scanning Provided storage volume
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cc933cba
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cc933cba
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cc933cba
Branch: refs/heads/YARN-6592
Commit: cc933cba77c147153e463415fc192cee2d53a1ef
Parents: 4d59dab
Author: Virajith Jalaparti <vi...@apache.org>
Authored: Thu Nov 30 10:11:12 2017 -0800
Committer: Chris Douglas <cd...@apache.org>
Committed: Fri Dec 15 17:51:40 2017 -0800
----------------------------------------------------------------------
.../impl/TextFileRegionAliasMap.java | 3 +-
.../hdfs/server/datanode/DirectoryScanner.java | 3 +-
.../server/datanode/fsdataset/FsVolumeSpi.java | 40 ++++++++++----------
.../fsdataset/impl/ProvidedVolumeImpl.java | 4 +-
.../fsdataset/impl/TestProvidedImpl.java | 19 ++++++----
5 files changed, 37 insertions(+), 32 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/cc933cba/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
index 80f48c1..bd04d60 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java
@@ -439,7 +439,8 @@ public class TextFileRegionAliasMap
@Override
public void refresh() throws IOException {
- //nothing to do;
+ throw new UnsupportedOperationException(
+ "Refresh not supported by " + getClass());
}
}
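
Callers that previously relied on refresh() being a harmless no-op will now see an UnsupportedOperationException from this reader. A minimal defensive call site, assuming a reader whose refresh() is the method patched above (the reader variable, openReader() helper, and LOG field are illustrative, not part of this commit):

    try {
      reader.refresh();
    } catch (UnsupportedOperationException e) {
      // After this patch the text-file-backed reader cannot refresh in
      // place; a caller that needs fresh data must build a new reader.
      LOG.warn("Alias map reader does not support refresh()", e);
      reader = openReader();
    }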
http://git-wip-us.apache.org/repos/asf/hadoop/blob/cc933cba/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 8fb8551..ab9743c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -515,7 +515,8 @@ public class DirectoryScanner implements Runnable {
*
* @return a map of sorted arrays of block information
*/
- private Map<String, ScanInfo[]> getDiskReport() {
+ @VisibleForTesting
+ public Map<String, ScanInfo[]> getDiskReport() {
ScanInfoPerBlockPool list = new ScanInfoPerBlockPool();
ScanInfoPerBlockPool[] dirReports = null;
// First get list of data directories
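
Making getDiskReport() public and @VisibleForTesting lets a test drive a scan directly instead of waiting on the scanner thread; the new test added to TestProvidedImpl at the end of this commit is the first such caller. A minimal sketch of the pattern (datanode, dataset, and conf come from the test's existing setup):

    DirectoryScanner scanner = new DirectoryScanner(datanode, dataset, conf);
    // Keys are block pool ids; values are the ScanInfo records compiled
    // for that pool across all volumes, including provided ones.
    Map<String, FsVolumeSpi.ScanInfo[]> report = scanner.getDiskReport();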
http://git-wip-us.apache.org/repos/asf/hadoop/blob/cc933cba/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
index 15e71f0..20a153d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
@@ -296,8 +296,23 @@ public interface FsVolumeSpi
*/
public ScanInfo(long blockId, File blockFile, File metaFile,
FsVolumeSpi vol) {
- this(blockId, blockFile, metaFile, vol, null,
- (blockFile != null) ? blockFile.length() : 0);
+ this.blockId = blockId;
+ String condensedVolPath =
+ (vol == null || vol.getBaseURI() == null) ? null :
+ getCondensedPath(new File(vol.getBaseURI()).getAbsolutePath());
+ this.blockSuffix = blockFile == null ? null :
+ getSuffix(blockFile, condensedVolPath);
+ this.blockLength = (blockFile != null) ? blockFile.length() : 0;
+ if (metaFile == null) {
+ this.metaSuffix = null;
+ } else if (blockFile == null) {
+ this.metaSuffix = getSuffix(metaFile, condensedVolPath);
+ } else {
+ this.metaSuffix = getSuffix(metaFile,
+ condensedVolPath + blockSuffix);
+ }
+ this.volume = vol;
+ this.fileRegion = null;
}
/**
@@ -305,31 +320,18 @@ public interface FsVolumeSpi
* the block data and meta-data files.
*
* @param blockId the block ID
- * @param blockFile the path to the block data file
- * @param metaFile the path to the block meta-data file
* @param vol the volume that contains the block
* @param fileRegion the file region (for provided blocks)
* @param length the length of the block data
*/
- public ScanInfo(long blockId, File blockFile, File metaFile,
- FsVolumeSpi vol, FileRegion fileRegion, long length) {
+ public ScanInfo(long blockId, FsVolumeSpi vol, FileRegion fileRegion,
+ long length) {
this.blockId = blockId;
- String condensedVolPath =
- (vol == null || vol.getBaseURI() == null) ? null :
- getCondensedPath(new File(vol.getBaseURI()).getAbsolutePath());
- this.blockSuffix = blockFile == null ? null :
- getSuffix(blockFile, condensedVolPath);
this.blockLength = length;
- if (metaFile == null) {
- this.metaSuffix = null;
- } else if (blockFile == null) {
- this.metaSuffix = getSuffix(metaFile, condensedVolPath);
- } else {
- this.metaSuffix = getSuffix(metaFile,
- condensedVolPath + blockSuffix);
- }
this.volume = vol;
this.fileRegion = fileRegion;
+ this.blockSuffix = null;
+ this.metaSuffix = null;
}
/**
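
After this change the two ScanInfo constructors no longer share a code path: local replicas keep the File-based form, while provided replicas use the new FileRegion-based form. A minimal sketch of the two call shapes (blockId, blockFile, metaFile, localVolume, providedVolume, and region stand in for values the scanner already holds):

    // Local replica: suffixes are derived from the block and meta files,
    // and the length comes from blockFile.length().
    FsVolumeSpi.ScanInfo localInfo =
        new FsVolumeSpi.ScanInfo(blockId, blockFile, metaFile, localVolume);

    // Provided replica: no local files exist, so the FileRegion and its
    // length are passed instead and both suffixes stay null.
    FsVolumeSpi.ScanInfo providedInfo =
        new FsVolumeSpi.ScanInfo(blockId, providedVolume, region,
            region.getLength());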
http://git-wip-us.apache.org/repos/asf/hadoop/blob/cc933cba/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
index 65487f9..ab59fa5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
@@ -226,9 +226,7 @@ public class ProvidedVolumeImpl extends FsVolumeImpl {
reportCompiler.throttle();
FileRegion region = iter.next();
if (region.getBlockPoolId().equals(bpid)) {
- LOG.info("Adding ScanInfo for blkid " +
- region.getBlock().getBlockId());
- report.add(new ScanInfo(region.getBlock().getBlockId(), null, null,
+ report.add(new ScanInfo(region.getBlock().getBlockId(),
providedVolume, region, region.getLength()));
}
}
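
Before the patch this call had to pass explicit nulls for the block and meta files; the two call shapes, old and new, side by side (taken directly from the hunk above):

    // before: File arguments are always null for provided blocks
    report.add(new ScanInfo(region.getBlock().getBlockId(), null, null,
        providedVolume, region, region.getLength()));

    // after: the FileRegion-based constructor makes that explicit
    report.add(new ScanInfo(region.getBlock().getBlockId(),
        providedVolume, region, region.getLength()));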
http://git-wip-us.apache.org/repos/asf/hadoop/blob/cc933cba/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
index 52112f7..4190730 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java
@@ -61,6 +61,7 @@ import org.apache.hadoop.hdfs.server.datanode.BlockScanner;
import org.apache.hadoop.hdfs.server.datanode.DNConf;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataStorage;
+import org.apache.hadoop.hdfs.server.datanode.DirectoryScanner;
import org.apache.hadoop.hdfs.server.datanode.ProvidedReplica;
import org.apache.hadoop.hdfs.server.datanode.ReplicaInfo;
import org.apache.hadoop.hdfs.server.datanode.ShortCircuitRegistry;
@@ -231,14 +232,6 @@ public class TestProvidedImpl {
public void refresh() throws IOException {
// do nothing!
}
-
- public void setMinBlkId(int minId) {
- this.minId = minId;
- }
-
- public void setBlockCount(int numBlocks) {
- this.numBlocks = numBlocks;
- }
}
private static Storage.StorageDirectory createLocalStorageDirectory(
@@ -606,4 +599,14 @@ public class TestProvidedImpl {
}
}
}
+
+ @Test
+ public void testScannerWithProvidedVolumes() throws Exception {
+ DirectoryScanner scanner = new DirectoryScanner(datanode, dataset, conf);
+ Map<String, FsVolumeSpi.ScanInfo[]> report = scanner.getDiskReport();
+ // no blocks should be reported for the Provided volume as long as
+ // the directoryScanner is disabled.
+ assertEquals(0, report.get(BLOCK_POOL_IDS[CHOSEN_BP_ID]).length);
+ }
+
}