Posted to common-commits@hadoop.apache.org by ac...@apache.org on 2008/03/07 08:02:39 UTC

svn commit: r634563 - in /hadoop/core/trunk: CHANGES.txt src/test/org/apache/hadoop/io/FileBench.java src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java

Author: acmurthy
Date: Thu Mar  6 23:02:37 2008
New Revision: 634563

URL: http://svn.apache.org/viewvc?rev=634563&view=rev
Log:
HADOOP-2958. Fixed FileBench, which broke after HADOOP-2391 added a check for the existence of the output directory, and fixed a trivial bug in GenericMRLoadGenerator where the min/max word lengths were identical because both were read from the same config variables. Contributed by Chris Douglas.
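
For context, a minimal sketch of the FileBench pattern this patch moves to, written against the same pre-0.19 mapred API calls that appear in the diff below (the class and method names here are hypothetical, and the sketch assumes the output path has already been set on the JobConf): the output directory is configured once, and each benchmark run only passes a bare file name through the "test.filebench.name" property, so the HADOOP-2391 output-directory check is not tripped again for every file.

  import java.io.IOException;

  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.mapred.JobConf;
  import org.apache.hadoop.mapred.OutputFormat;
  import org.apache.hadoop.mapred.RecordWriter;
  import org.apache.hadoop.mapred.Reporter;

  public class FileBenchWriteSketch {
    @SuppressWarnings("unchecked") // raw OutputFormat from JobConf, as in the patch
    static void writeOne(JobConf job, String fn) throws IOException {
      // Output directory is assumed to be set once up front (e.g. from -dir)
      // and is never changed per file.
      Path outd = job.getOutputPath();
      OutputFormat outf = job.getOutputFormat();
      // The per-run file name travels through a job property instead of a
      // freshly set output path; "test.filebench.name" is the property name
      // the patch introduces.
      job.set("test.filebench.name", fn);
      RecordWriter<Text, Text> rw =
          outf.getRecordWriter(outd.getFileSystem(job), job, fn, Reporter.NULL);
      try {
        rw.write(new Text("key"), new Text("value"));
      } finally {
        rw.close(Reporter.NULL);
      }
    }
  }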

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=634563&r1=634562&r2=634563&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Mar  6 23:02:37 2008
@@ -280,6 +280,12 @@
     HADOOP-2756. NPE in DFSClient while closing DFSOutputStreams 
     under load. (rangadi)
 
+    HADOOP-2958. Fixed FileBench which broke due to HADOOP-2391 which performs
+    a check for existence of the output directory and a trivial bug in
+    GenericMRLoadGenerator where min/max word lengths were identical since
+    they were looking at the same config variables (Chris Douglas via
+    acmurthy) 
+
 Release 0.16.0 - 2008-02-07
 
   INCOMPATIBLE CHANGES

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java?rev=634563&r1=634562&r2=634563&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java Thu Mar  6 23:02:37 2008
@@ -111,10 +111,11 @@
     Text key = new Text();
     Text val = new Text();
 
+    final String fn = conf.get("test.filebench.name", "");
+    final Path outd = conf.getOutputPath();
     OutputFormat outf = conf.getOutputFormat();
-    Path out = conf.getOutputPath();
     RecordWriter<Text,Text> rw =
-      outf.getRecordWriter(out.getFileSystem(conf), conf, out.toString(),
+      outf.getRecordWriter(outd.getFileSystem(conf), conf, fn,
                            Reporter.NULL);
     try {
       long acc = 0L;
@@ -137,7 +138,8 @@
   @SuppressWarnings("unchecked") // InputFormat instantiation
   static long readBench(JobConf conf) throws IOException {
     InputFormat inf = conf.getInputFormat();
-    Path pin = conf.getInputPaths()[0];
+    final String fn = conf.get("test.filebench.name", "");
+    Path pin = new Path(conf.getInputPaths()[0], fn);
     FileStatus in = pin.getFileSystem(conf).getFileStatus(pin);
     RecordReader rr = inf.getRecordReader(
         new FileSplit(pin, 0, in.getLen(), conf), conf, Reporter.NULL);
@@ -173,6 +175,7 @@
       try {
         if ("-dir".equals(argv[i])) {
           root = new Path(argv[++i]).makeQualified(fs);
+          System.out.println("DIR: " + root.toString());
         } else if ("-seed".equals(argv[i])) {
           job.setLong("filebench.seed", Long.valueOf(argv[++i]));
         } else if (argv[i].startsWith("-no")) {
@@ -199,6 +202,8 @@
     fillBlocks(job);
     job.setOutputKeyClass(Text.class);
     job.setOutputValueClass(Text.class);
+    job.setInputPath(root);
+    job.setOutputPath(root);
 
     if (null == cc) cc = EnumSet.allOf(CCodec.class);
     if (null == ct) ct = EnumSet.allOf(CType.class);
@@ -217,7 +222,7 @@
                 typ.name().toUpperCase();
               typ.configure(job);
               System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
-              System.out.println(rwop.exec(new Path(root, fn), job) / 1000 +
+              System.out.println(rwop.exec(fn, job) / 1000 +
                   " seconds");
             }
           } else {
@@ -226,10 +231,10 @@
               cod.name().toUpperCase();
             Path p = new Path(root, fn);
             if (rwop == RW.r && !fs.exists(p)) {
-              p = new Path(root, fn + cod.getExt());
+              fn += cod.getExt();
             }
             System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
-            System.out.println(rwop.exec(p, job) / 1000 +
+            System.out.println(rwop.exec(fn, job) / 1000 +
                 " seconds");
           }
         }
@@ -287,20 +292,20 @@
   }
   enum RW {
     w() {
-      public long exec(Path p, JobConf job) throws IOException {
-        job.setOutputPath(p);
+      public long exec(String fn, JobConf job) throws IOException {
+        job.set("test.filebench.name", fn);
         return writeBench(job);
       }
     },
 
     r() {
-      public long exec(Path p, JobConf job) throws IOException {
-        job.setInputPath(p);
+      public long exec(String fn, JobConf job) throws IOException {
+        job.set("test.filebench.name", fn);
         return readBench(job);
       }
     };
 
-    public abstract long exec(Path p, JobConf job) throws IOException;
+    public abstract long exec(String fn, JobConf job) throws IOException;
   }
   static Map<Class<? extends Enum>, Map<String,? extends Enum>> fullmap
     = new HashMap<Class<? extends Enum>, Map<String,? extends Enum>>();

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java?rev=634563&r1=634562&r2=634563&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/GenericMRLoadGenerator.java Thu Mar  6 23:02:37 2008
@@ -269,9 +269,9 @@
       bytesToWrite = job.getLong("test.randomtextwrite.bytes_per_map",
                                     1*1024*1024*1024);
       keymin = job.getInt("test.randomtextwrite.min_words_key", 5);
-      keymax = job.getInt("test.randomtextwrite.min_words_key", 10);
+      keymax = job.getInt("test.randomtextwrite.max_words_key", 10);
       valmin = job.getInt("test.randomtextwrite.min_words_value", 5);
-      valmax = job.getInt("test.randomtextwrite.min_words_value", 10);
+      valmax = job.getInt("test.randomtextwrite.max_words_value", 10);
     }
 
     public void map(Text key, Text val, OutputCollector<Text,Text> output,
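
To make the effect of that one-key fix concrete, here is a small, self-contained illustration (the class name and the configured values are made up for the example; the property names are the ones GenericMRLoadGenerator reads): pulling both bounds from the min_* key makes the configured maximum unreachable, while the corrected max_* reads restore the intended range.

  import org.apache.hadoop.mapred.JobConf;

  public class WordLengthConfigSketch {
    public static void main(String[] args) {
      JobConf job = new JobConf();
      job.setInt("test.randomtextwrite.min_words_key", 7);
      job.setInt("test.randomtextwrite.max_words_key", 12);

      // Buggy reads: both bounds come from the min key, so min == max == 7
      // no matter what the user configured for the maximum.
      int buggyMin = job.getInt("test.randomtextwrite.min_words_key", 5);
      int buggyMax = job.getInt("test.randomtextwrite.min_words_key", 10);

      // Fixed reads, as in the patch: the max bound has its own key.
      int keymin = job.getInt("test.randomtextwrite.min_words_key", 5);
      int keymax = job.getInt("test.randomtextwrite.max_words_key", 10);

      System.out.println("buggy: min=" + buggyMin + " max=" + buggyMax); // 7, 7
      System.out.println("fixed: min=" + keymin + " max=" + keymax);     // 7, 12
    }
  }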