Posted to common-commits@hadoop.apache.org by zh...@apache.org on 2015/05/04 19:57:45 UTC

[11/50] hadoop git commit: HDFS-8104. Make hard-coded values consistent with the system default schema first, before removing them. Contributed by Kai Zheng

HDFS-8104. Make hard-coded values consistent with the system default schema first, before removing them. Contributed by Kai Zheng


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/515deb9e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/515deb9e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/515deb9e

Branch: refs/heads/HDFS-7285
Commit: 515deb9e72f670d05bf69da7f96ab715e1783d02
Parents: 3e4c3dd
Author: Kai Zheng <ka...@intel.com>
Authored: Fri Apr 10 00:16:28 2015 +0800
Committer: Zhe Zhang <zh...@apache.org>
Committed: Mon May 4 10:13:17 2015 -0700

----------------------------------------------------------------------
 .../hadoop-hdfs/CHANGES-HDFS-EC-7285.txt        |   4 +-
 .../hadoop/hdfs/TestPlanReadPortions.java       | 142 +++++++++++++++++++
 .../apache/hadoop/hdfs/TestReadStripedFile.java | 112 ---------------
 3 files changed, 145 insertions(+), 113 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/515deb9e/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
index 5078a15..1e695c4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
@@ -54,4 +54,6 @@
     HDFS-8023. Erasure Coding: retrieve eraure coding schema for a file from
     NameNode (vinayakumarb)
     
-    HDFS-8074. Define a system-wide default EC schema. (Kai Zheng)
\ No newline at end of file
+    HDFS-8074. Define a system-wide default EC schema. (Kai Zheng)
+
+    HDFS-8104. Make hard-coded values consistent with the system default schema first before remove them. (Kai Zheng)
\ No newline at end of file
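
As background for the new test below: DFSStripedInputStream#planReadPortions splits one
contiguous read over a striped block group into a per-data-block read plan. The idea can be
reproduced with a small self-contained sketch (a simplified re-derivation that assumes
round-robin placement of cells on the data blocks; it is not the actual HDFS implementation
and only reproduces the per-block read lengths asserted by the test, not the buffer offsets):

public class StripedReadSketch {
  // Same constants as the test: 3 data blocks per group, 128 KB striping cells.
  static final int GROUP_SIZE = 3;
  static final int CELLSIZE = 128 * 1024;

  // For a read of `length` bytes starting at logical offset `start` within the
  // block group, return how many bytes must be read from each data block.
  static long[] perBlockReadLengths(long start, long length) {
    long[] lengths = new long[GROUP_SIZE];
    long pos = start;
    long remaining = length;
    while (remaining > 0) {
      int cell = (int) (pos / CELLSIZE);            // logical cell index of the current position
      int blockIndex = cell % GROUP_SIZE;           // round-robin placement of cells on blocks
      long cellEnd = (long) (cell + 1) * CELLSIZE;  // logical end of the current cell
      long chunk = Math.min(remaining, cellEnd - pos);
      lengths[blockIndex] += chunk;
      pos += chunk;
      remaining -= chunk;
    }
    return lengths;
  }

  public static void main(String[] args) {
    // Mirrors one case from TestPlanReadPortions: reading 5 * CELLSIZE + 10 bytes from
    // offset 0 yields {2 * CELLSIZE, 2 * CELLSIZE, CELLSIZE + 10} across the three blocks.
    System.out.println(java.util.Arrays.toString(
        perBlockReadLengths(0, 5L * CELLSIZE + 10)));
  }
}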

http://git-wip-us.apache.org/repos/asf/hadoop/blob/515deb9e/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPlanReadPortions.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPlanReadPortions.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPlanReadPortions.java
new file mode 100644
index 0000000..cf84b30
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPlanReadPortions.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs;
+
+import org.junit.Test;
+
+import static org.apache.hadoop.hdfs.DFSStripedInputStream.ReadPortion;
+import static org.junit.Assert.*;
+
+public class TestPlanReadPortions {
+
+  // We only support this as num of data blocks. It might be good enough for now
+  // for the purpose, even not flexible yet for any number in a schema.
+  private final short GROUP_SIZE = 3;
+  private final int CELLSIZE = 128 * 1024;
+
+  private void testPlanReadPortions(int startInBlk, int length,
+      int bufferOffset, int[] readLengths, int[] offsetsInBlock,
+      int[][] bufferOffsets, int[][] bufferLengths) {
+    ReadPortion[] results = DFSStripedInputStream.planReadPortions(GROUP_SIZE,
+        CELLSIZE, startInBlk, length, bufferOffset);
+    assertEquals(GROUP_SIZE, results.length);
+
+    for (int i = 0; i < GROUP_SIZE; i++) {
+      assertEquals(readLengths[i], results[i].getReadLength());
+      assertEquals(offsetsInBlock[i], results[i].getStartOffsetInBlock());
+      final int[] bOffsets = results[i].getOffsets();
+      assertArrayEquals(bufferOffsets[i], bOffsets);
+      final int[] bLengths = results[i].getLengths();
+      assertArrayEquals(bufferLengths[i], bLengths);
+    }
+  }
+
+  /**
+   * Test {@link DFSStripedInputStream#planReadPortions}
+   */
+  @Test
+  public void testPlanReadPortions() {
+    /**
+     * start block offset is 0, read cellSize - 10
+     */
+    testPlanReadPortions(0, CELLSIZE - 10, 0,
+        new int[]{CELLSIZE - 10, 0, 0}, new int[]{0, 0, 0},
+        new int[][]{new int[]{0}, new int[]{}, new int[]{}},
+        new int[][]{new int[]{CELLSIZE - 10}, new int[]{}, new int[]{}});
+
+    /**
+     * start block offset is 0, read 3 * cellSize
+     */
+    testPlanReadPortions(0, GROUP_SIZE * CELLSIZE, 0,
+        new int[]{CELLSIZE, CELLSIZE, CELLSIZE}, new int[]{0, 0, 0},
+        new int[][]{new int[]{0}, new int[]{CELLSIZE}, new int[]{CELLSIZE * 2}},
+        new int[][]{new int[]{CELLSIZE}, new int[]{CELLSIZE}, new int[]{CELLSIZE}});
+
+    /**
+     * start block offset is 0, read cellSize + 10
+     */
+    testPlanReadPortions(0, CELLSIZE + 10, 0,
+        new int[]{CELLSIZE, 10, 0}, new int[]{0, 0, 0},
+        new int[][]{new int[]{0}, new int[]{CELLSIZE}, new int[]{}},
+        new int[][]{new int[]{CELLSIZE}, new int[]{10}, new int[]{}});
+
+    /**
+     * start block offset is 0, read 5 * cellSize + 10, buffer start offset is 100
+     */
+    testPlanReadPortions(0, 5 * CELLSIZE + 10, 100,
+        new int[]{CELLSIZE * 2, CELLSIZE * 2, CELLSIZE + 10}, new int[]{0, 0, 0},
+        new int[][]{new int[]{100, 100 + CELLSIZE * GROUP_SIZE},
+            new int[]{100 + CELLSIZE, 100 + CELLSIZE * 4},
+            new int[]{100 + CELLSIZE * 2, 100 + CELLSIZE * 5}},
+        new int[][]{new int[]{CELLSIZE, CELLSIZE},
+            new int[]{CELLSIZE, CELLSIZE},
+            new int[]{CELLSIZE, 10}});
+
+    /**
+     * start block offset is 2, read 3 * cellSize
+     */
+    testPlanReadPortions(2, GROUP_SIZE * CELLSIZE, 100,
+        new int[]{CELLSIZE, CELLSIZE, CELLSIZE},
+        new int[]{2, 0, 0},
+        new int[][]{new int[]{100, 100 + GROUP_SIZE * CELLSIZE - 2},
+            new int[]{100 + CELLSIZE - 2},
+            new int[]{100 + CELLSIZE * 2 - 2}},
+        new int[][]{new int[]{CELLSIZE - 2, 2},
+            new int[]{CELLSIZE},
+            new int[]{CELLSIZE}});
+
+    /**
+     * start block offset is 2, read 3 * cellSize + 10
+     */
+    testPlanReadPortions(2, GROUP_SIZE * CELLSIZE + 10, 0,
+        new int[]{CELLSIZE + 10, CELLSIZE, CELLSIZE},
+        new int[]{2, 0, 0},
+        new int[][]{new int[]{0, GROUP_SIZE * CELLSIZE - 2},
+            new int[]{CELLSIZE - 2},
+            new int[]{CELLSIZE * 2 - 2}},
+        new int[][]{new int[]{CELLSIZE - 2, 12},
+            new int[]{CELLSIZE},
+            new int[]{CELLSIZE}});
+
+    /**
+     * start block offset is cellSize * 2 - 1, read 5 * cellSize + 10
+     */
+    testPlanReadPortions(CELLSIZE * 2 - 1, 5 * CELLSIZE + 10, 0,
+        new int[]{CELLSIZE * 2, CELLSIZE + 10, CELLSIZE * 2},
+        new int[]{CELLSIZE, CELLSIZE - 1, 0},
+        new int[][]{new int[]{CELLSIZE + 1, 4 * CELLSIZE + 1},
+            new int[]{0, 2 * CELLSIZE + 1, 5 * CELLSIZE + 1},
+            new int[]{1, 3 * CELLSIZE + 1}},
+        new int[][]{new int[]{CELLSIZE, CELLSIZE},
+            new int[]{1, CELLSIZE, 9},
+            new int[]{CELLSIZE, CELLSIZE}});
+
+    /**
+     * start block offset is cellSize * 6 - 1, read 7 * cellSize + 10
+     */
+    testPlanReadPortions(CELLSIZE * 6 - 1, 7 * CELLSIZE + 10, 0,
+        new int[]{CELLSIZE * 3, CELLSIZE * 2 + 9, CELLSIZE * 2 + 1},
+        new int[]{CELLSIZE * 2, CELLSIZE * 2, CELLSIZE * 2 - 1},
+        new int[][]{new int[]{1, 3 * CELLSIZE + 1, 6 * CELLSIZE + 1},
+            new int[]{CELLSIZE + 1, 4 * CELLSIZE + 1, 7 * CELLSIZE + 1},
+            new int[]{0, 2 * CELLSIZE + 1, 5 * CELLSIZE + 1}},
+        new int[][]{new int[]{CELLSIZE, CELLSIZE, CELLSIZE},
+            new int[]{CELLSIZE, CELLSIZE, 9},
+            new int[]{1, CELLSIZE, CELLSIZE}});
+  }
+}
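
(To run just the new test locally, something like "mvn -pl hadoop-hdfs-project/hadoop-hdfs test -Dtest=TestPlanReadPortions" from the source root should work, assuming the usual Maven Surefire setup on this branch.)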

http://git-wip-us.apache.org/repos/asf/hadoop/blob/515deb9e/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFile.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFile.java
index 0032bdd..849e12e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFile.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock;
-import static org.apache.hadoop.hdfs.DFSStripedInputStream.ReadPortion;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -78,117 +77,6 @@ public class TestReadStripedFile {
     }
   }
 
-  private void testPlanReadPortions(int startInBlk, int length,
-      int bufferOffset, int[] readLengths, int[] offsetsInBlock,
-      int[][] bufferOffsets, int[][] bufferLengths) {
-    ReadPortion[] results = DFSStripedInputStream.planReadPortions(GROUP_SIZE,
-        CELLSIZE, startInBlk, length, bufferOffset);
-    assertEquals(GROUP_SIZE, results.length);
-
-    for (int i = 0; i < GROUP_SIZE; i++) {
-      assertEquals(readLengths[i], results[i].getReadLength());
-      assertEquals(offsetsInBlock[i], results[i].getStartOffsetInBlock());
-      final int[] bOffsets = results[i].getOffsets();
-      assertArrayEquals(bufferOffsets[i], bOffsets);
-      final int[] bLengths = results[i].getLengths();
-      assertArrayEquals(bufferLengths[i], bLengths);
-    }
-  }
-
-  /**
-   * Test {@link DFSStripedInputStream#planReadPortions}
-   */
-  @Test
-  public void testPlanReadPortions() {
-    /**
-     * start block offset is 0, read cellSize - 10
-     */
-    testPlanReadPortions(0, CELLSIZE - 10, 0,
-        new int[]{CELLSIZE - 10, 0, 0}, new int[]{0, 0, 0},
-        new int[][]{new int[]{0}, new int[]{}, new int[]{}},
-        new int[][]{new int[]{CELLSIZE - 10}, new int[]{}, new int[]{}});
-
-    /**
-     * start block offset is 0, read 3 * cellSize
-     */
-    testPlanReadPortions(0, GROUP_SIZE * CELLSIZE, 0,
-        new int[]{CELLSIZE, CELLSIZE, CELLSIZE}, new int[]{0, 0, 0},
-        new int[][]{new int[]{0}, new int[]{CELLSIZE}, new int[]{CELLSIZE * 2}},
-        new int[][]{new int[]{CELLSIZE}, new int[]{CELLSIZE}, new int[]{CELLSIZE}});
-
-    /**
-     * start block offset is 0, read cellSize + 10
-     */
-    testPlanReadPortions(0, CELLSIZE + 10, 0,
-        new int[]{CELLSIZE, 10, 0}, new int[]{0, 0, 0},
-        new int[][]{new int[]{0}, new int[]{CELLSIZE}, new int[]{}},
-        new int[][]{new int[]{CELLSIZE}, new int[]{10}, new int[]{}});
-
-    /**
-     * start block offset is 0, read 5 * cellSize + 10, buffer start offset is 100
-     */
-    testPlanReadPortions(0, 5 * CELLSIZE + 10, 100,
-        new int[]{CELLSIZE * 2, CELLSIZE * 2, CELLSIZE + 10}, new int[]{0, 0, 0},
-        new int[][]{new int[]{100, 100 + CELLSIZE * GROUP_SIZE},
-            new int[]{100 + CELLSIZE, 100 + CELLSIZE * 4},
-            new int[]{100 + CELLSIZE * 2, 100 + CELLSIZE * 5}},
-        new int[][]{new int[]{CELLSIZE, CELLSIZE},
-            new int[]{CELLSIZE, CELLSIZE},
-            new int[]{CELLSIZE, 10}});
-
-    /**
-     * start block offset is 2, read 3 * cellSize
-     */
-    testPlanReadPortions(2, GROUP_SIZE * CELLSIZE, 100,
-        new int[]{CELLSIZE, CELLSIZE, CELLSIZE},
-        new int[]{2, 0, 0},
-        new int[][]{new int[]{100, 100 + GROUP_SIZE * CELLSIZE - 2},
-            new int[]{100 + CELLSIZE - 2},
-            new int[]{100 + CELLSIZE * 2 - 2}},
-        new int[][]{new int[]{CELLSIZE - 2, 2},
-            new int[]{CELLSIZE},
-            new int[]{CELLSIZE}});
-
-    /**
-     * start block offset is 2, read 3 * cellSize + 10
-     */
-    testPlanReadPortions(2, GROUP_SIZE * CELLSIZE + 10, 0,
-        new int[]{CELLSIZE + 10, CELLSIZE, CELLSIZE},
-        new int[]{2, 0, 0},
-        new int[][]{new int[]{0, GROUP_SIZE * CELLSIZE - 2},
-            new int[]{CELLSIZE - 2},
-            new int[]{CELLSIZE * 2 - 2}},
-        new int[][]{new int[]{CELLSIZE - 2, 12},
-            new int[]{CELLSIZE},
-            new int[]{CELLSIZE}});
-
-    /**
-     * start block offset is cellSize * 2 - 1, read 5 * cellSize + 10
-     */
-    testPlanReadPortions(CELLSIZE * 2 - 1, 5 * CELLSIZE + 10, 0,
-        new int[]{CELLSIZE * 2, CELLSIZE + 10, CELLSIZE * 2},
-        new int[]{CELLSIZE, CELLSIZE - 1, 0},
-        new int[][]{new int[]{CELLSIZE + 1, 4 * CELLSIZE + 1},
-            new int[]{0, 2 * CELLSIZE + 1, 5 * CELLSIZE + 1},
-            new int[]{1, 3 * CELLSIZE + 1}},
-        new int[][]{new int[]{CELLSIZE, CELLSIZE},
-            new int[]{1, CELLSIZE, 9},
-            new int[]{CELLSIZE, CELLSIZE}});
-
-    /**
-     * start block offset is cellSize * 6 - 1, read 7 * cellSize + 10
-     */
-    testPlanReadPortions(CELLSIZE * 6 - 1, 7 * CELLSIZE + 10, 0,
-        new int[]{CELLSIZE * 3, CELLSIZE * 2 + 9, CELLSIZE * 2 + 1},
-        new int[]{CELLSIZE * 2, CELLSIZE * 2, CELLSIZE * 2 - 1},
-        new int[][]{new int[]{1, 3 * CELLSIZE + 1, 6 * CELLSIZE + 1},
-            new int[]{CELLSIZE + 1, 4 * CELLSIZE + 1, 7 * CELLSIZE + 1},
-            new int[]{0, 2 * CELLSIZE + 1, 5 * CELLSIZE + 1}},
-        new int[][]{new int[]{CELLSIZE, CELLSIZE, CELLSIZE},
-            new int[]{CELLSIZE, CELLSIZE, 9},
-            new int[]{1, CELLSIZE, CELLSIZE}});
-  }
-
   private LocatedStripedBlock createDummyLocatedBlock() {
     final long blockGroupID = -1048576;
     DatanodeInfo[] locs = new DatanodeInfo[TOTAL_SIZE];