You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by yliu@apache.org on 2015/05/13 02:54:04 UTC
hadoop git commit: HDFS-8363. Erasure Coding:
DFSStripedInputStream#seekToNewSource. (yliu)
Repository: hadoop
Updated Branches:
refs/heads/HDFS-7285 95205a31f -> 64be3d5ba
HDFS-8363. Erasure Coding: DFSStripedInputStream#seekToNewSource. (yliu)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/64be3d5b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/64be3d5b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/64be3d5b
Branch: refs/heads/HDFS-7285
Commit: 64be3d5ba7905c8e707997abccf5540918d16ad3
Parents: 95205a3
Author: yliu <yliu@apache.org>
Authored: Wed May 13 08:48:56 2015 +0800
Committer: yliu <yliu@apache.org>
Committed: Wed May 13 08:48:56 2015 +0800
----------------------------------------------------------------------
.../hadoop-hdfs/CHANGES-HDFS-EC-7285.txt | 2 ++
.../apache/hadoop/hdfs/DFSStripedInputStream.java | 15 ++++++++++++---
2 files changed, 14 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/64be3d5b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
index 79ad208..0a2bb9e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
@@ -204,3 +204,5 @@
HDFS-8368. Erasure Coding: DFS opening a non-existent file need to be
handled properly (Rakesh R via zhz)
+
+ HDFS-8363. Erasure Coding: DFSStripedInputStream#seekToNewSource. (yliu)
http://git-wip-us.apache.org/repos/asf/hadoop/blob/64be3d5b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
index 7678fae..8f15eda 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
@@ -130,12 +130,12 @@ public class DFSStripedInputStream extends DFSInputStream {
}
}
- private final short groupSize = HdfsConstants.NUM_DATA_BLOCKS;
- private final BlockReader[] blockReaders = new BlockReader[groupSize];
- private final DatanodeInfo[] currentNodes = new DatanodeInfo[groupSize];
+ private final BlockReader[] blockReaders;
+ private final DatanodeInfo[] currentNodes;
private final int cellSize;
private final short dataBlkNum;
private final short parityBlkNum;
+ private final short groupSize;
/** the buffer for a complete stripe */
private ByteBuffer curStripeBuf;
private final ECSchema schema;
@@ -155,6 +155,9 @@ public class DFSStripedInputStream extends DFSInputStream {
cellSize = schema.getChunkSize();
dataBlkNum = (short) schema.getNumDataUnits();
parityBlkNum = (short) schema.getNumParityUnits();
+ groupSize = dataBlkNum;
+ blockReaders = new BlockReader[groupSize];
+ currentNodes = new DatanodeInfo[groupSize];
curStripeRange = new StripeRange(0, 0);
readingService =
new ExecutorCompletionService<>(dfsClient.getStripedReadsThreadPool());
@@ -392,6 +395,12 @@ public class DFSStripedInputStream extends DFSInputStream {
}
@Override
+ public synchronized boolean seekToNewSource(long targetPos)
+ throws IOException {
+ return false;
+ }
+
+ @Override
protected synchronized int readWithStrategy(ReaderStrategy strategy,
int off, int len) throws IOException {
dfsClient.checkOpen();