Posted to common-commits@hadoop.apache.org by jl...@apache.org on 2015/05/09 00:30:01 UTC

hadoop git commit: MAPREDUCE-5248. Let NNBenchWithoutMR specify the replication factor for its test. Contributed by Erik Paulson

Repository: hadoop
Updated Branches:
  refs/heads/trunk c945c2048 -> 30099a36c


MAPREDUCE-5248. Let NNBenchWithoutMR specify the replication factor for its test. Contributed by Erik Paulson


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/30099a36
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/30099a36
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/30099a36

Branch: refs/heads/trunk
Commit: 30099a36c6b0f658d25fb505a9f3ce15d19f7ba6
Parents: c945c20
Author: Jason Lowe <jl...@apache.org>
Authored: Fri May 8 22:28:50 2015 +0000
Committer: Jason Lowe <jl...@apache.org>
Committed: Fri May 8 22:29:47 2015 +0000

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt            |  3 +++
 .../apache/hadoop/hdfs/NNBenchWithoutMR.java    | 23 ++++++++++++--------
 2 files changed, 17 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
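
For context, the heart of the change is that the replication factor passed to FileSystem.create is now taken from a new command-line option instead of being hard-coded to (short) 1. The following is a minimal, self-contained sketch of that call, not code from the patch: the class name, path, buffer size, and block size are illustrative placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Illustrative sketch of the FileSystem.create overload that
 * NNBenchWithoutMR calls, with the replication factor supplied by the
 * caller rather than hard-coded. Values below are placeholders.
 */
public class CreateWithReplicationSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fileSys = FileSystem.get(conf);

    short replicationFactorPerFile = 3;      // what -replicationFactorPerFile now controls
    int bufferSize = 512;                    // same buffer size the benchmark passes
    long bytesPerBlock = 128L * 1024 * 1024; // illustrative block size

    // create(Path, overwrite, bufferSize, replication, blockSize)
    FSDataOutputStream out = fileSys.create(
        new Path("/tmp/nnbench-sketch/0"), false, bufferSize,
        replicationFactorPerFile, bytesPerBlock);
    out.close();
  }
}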


http://git-wip-us.apache.org/repos/asf/hadoop/blob/30099a36/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index ba9da73..4507545 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -322,6 +322,9 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-5981. Log levels of certain MR logs can be changed to DEBUG.
     (Varun Saxena via devaraj)
 
+    MAPREDUCE-5248. Let NNBenchWithoutMR specify the replication factor for
+    its test (Erik Paulson via jlowe)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/30099a36/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java
index 352d734..8cd6f36 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java
@@ -54,6 +54,7 @@ public class NNBenchWithoutMR {
   private static long bytesPerBlock = 1;
   private static long blocksPerFile = 0;
   private static long bytesPerFile = 1;
+  private static short replicationFactorPerFile = 1; // default is 1
   private static Path baseDir = null;
     
   // variables initialized in main()
@@ -106,7 +107,7 @@ public class NNBenchWithoutMR {
         try {
           out = fileSys.create(
                   new Path(taskDir, "" + index), false, 512,
-                  (short)1, bytesPerBlock);
+                  (short)replicationFactorPerFile, bytesPerBlock);
           success = true;
         } catch (IOException ioe) { 
           success=false; 
@@ -263,14 +264,15 @@ public class NNBenchWithoutMR {
     
     String usage =
       "Usage: nnbench " +
-      "  -operation <one of createWrite, openRead, rename, or delete> " +
-      "  -baseDir <base output/input DFS path> " +
-      "  -startTime <time to start, given in seconds from the epoch> " +
-      "  -numFiles <number of files to create> " +
-      "  -blocksPerFile <number of blocks to create per file> " +
-      "  [-bytesPerBlock <number of bytes to write to each block, default is 1>] " +
-      "  [-bytesPerChecksum <value for io.bytes.per.checksum>]" +
-      "Note: bytesPerBlock MUST be a multiple of bytesPerChecksum";
+      "  -operation <one of createWrite, openRead, rename, or delete>\n " +
+      "  -baseDir <base output/input DFS path>\n " +
+      "  -startTime <time to start, given in seconds from the epoch>\n" +
+      "  -numFiles <number of files to create>\n " +
+      "  -replicationFactorPerFile <Replication factor for the files, default is 1>\n" +
+      "  -blocksPerFile <number of blocks to create per file>\n" +
+      "  [-bytesPerBlock <number of bytes to write to each block, default is 1>]\n" +
+      "  [-bytesPerChecksum <value for io.bytes.per.checksum>]\n" +
+      "Note: bytesPerBlock MUST be a multiple of bytesPerChecksum\n";
     
     String operation = null;
     for (int i = 0; i < args.length; i++) { // parse command line
@@ -284,6 +286,8 @@ public class NNBenchWithoutMR {
         bytesPerBlock = Long.parseLong(args[++i]);
       } else if (args[i].equals("-bytesPerChecksum")) {
         bytesPerChecksum = Integer.parseInt(args[++i]);        
+      } else if (args[i].equals("-replicationFactorPerFile")) {
+        replicationFactorPerFile = Short.parseShort(args[++i]);
       } else if (args[i].equals("-startTime")) {
         startTime = Long.parseLong(args[++i]) * 1000;
       } else if (args[i].equals("-operation")) {
@@ -307,6 +311,7 @@ public class NNBenchWithoutMR {
     System.out.println("   baseDir: " + baseDir);
     System.out.println("   startTime: " + startTime);
     System.out.println("   numFiles: " + numFiles);
+    System.out.println("   replicationFactorPerFile: " + replicationFactorPerFile);
     System.out.println("   blocksPerFile: " + blocksPerFile);
     System.out.println("   bytesPerBlock: " + bytesPerBlock);
     System.out.println("   bytesPerChecksum: " + bytesPerChecksum);