Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/04/16 23:44:46 UTC

svn commit: r529410 [27/27] - in /lucene/hadoop/trunk: ./ src/contrib/abacus/src/examples/org/apache/hadoop/abacus/examples/ src/contrib/abacus/src/java/org/apache/hadoop/abacus/ src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/ src/...

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java Mon Apr 16 14:44:35 2007
@@ -46,254 +46,254 @@
  */
 public class TestJobControl extends junit.framework.TestCase {
 
-    private static NumberFormat idFormat = NumberFormat.getInstance();
-    static {
-        idFormat.setMinimumIntegerDigits(4);
-        idFormat.setGroupingUsed(false);
-    }
-
-    static private Random rand = new Random();
-
-    private static void cleanData(FileSystem fs, Path dirPath)
-            throws IOException {
-        fs.delete(dirPath);
-    }
-
-    private static String generateRandomWord() {
-        return idFormat.format(rand.nextLong());
-    }
-
-    private static String generateRandomLine() {
-        long r = rand.nextLong() % 7;
-        long n = r + 20;
-        StringBuffer sb = new StringBuffer();
-        for (int i = 0; i < n; i++) {
-            sb.append(generateRandomWord()).append(" ");
-        }
-        sb.append("\n");
-        return sb.toString();
-    }
-
-    private static void generateData(FileSystem fs, Path dirPath)
-            throws IOException {
-        FSDataOutputStream out = fs.create(new Path(dirPath, "data.txt"));
-        for (int i = 0; i < 100000; i++) {
-            String line = TestJobControl.generateRandomLine();
-            out.write(line.getBytes("UTF-8"));
-        }
-        out.close();
-    }
-
-    public static class DataCopy extends MapReduceBase implements Mapper,
-            Reducer {
-        public void map(WritableComparable key, Writable value,
-                OutputCollector output, Reporter reporter) throws IOException {
-            output.collect(new Text(key.toString()), value);
-        }
-
-        public void reduce(WritableComparable key, Iterator values,
-                OutputCollector output, Reporter reporter) throws IOException {
-            Text dumbKey = new Text("");
-            while (values.hasNext()) {
-                Text data = (Text) values.next();
-                output.collect(dumbKey, data);
-            }
-        }
-    }
-
-    private static JobConf createCopyJob(ArrayList indirs, Path outdir)
-            throws Exception {
-
-        Configuration defaults = new Configuration();
-        JobConf theJob = new JobConf(defaults, TestJobControl.class);
-        theJob.setJobName("DataMoveJob");
-
-        theJob.setInputPath((Path) indirs.get(0));
-        if (indirs.size() > 1) {
-            for (int i = 1; i < indirs.size(); i++) {
-                theJob.addInputPath((Path) indirs.get(i));
-            }
-        }
-        theJob.setMapperClass(DataCopy.class);
-        theJob.setOutputPath(outdir);
-        theJob.setOutputKeyClass(Text.class);
-        theJob.setOutputValueClass(Text.class);
-        theJob.setReducerClass(DataCopy.class);
-        theJob.setNumMapTasks(12);
-        theJob.setNumReduceTasks(4);
-        return theJob;
-    }
-
-    /**
-     * This is the main function for testing the JobControl class.
-     * It first cleans all the dirs it will use. Then it generates some random text
-     * data in TestJobControlData/indir. Then it creates 4 jobs:
-     *      Job 1: copy data from indir to outdir_1
-     *      Job 2: copy data from indir to outdir_2
-     *      Job 3: copy data from outdir_1 and outdir_2 to outdir_3
-     *      Job 4: copy data from outdir_3 to outdir_4
-     * Jobs 1 and 2 have no dependencies. Job 3 depends on jobs 1 and 2, and
-     * job 4 depends on job 3.
-     * 
-     * It then creates a JobControl object, adds the 4 jobs to it, and finally
-     * starts a thread to run the JobControl object while monitoring and reporting
-     * the job states.
-     */
-    public static void doJobControlTest() throws Exception {
-        
-        Configuration defaults = new Configuration();
-        FileSystem fs = FileSystem.get(defaults);
-        Path rootDataDir = new Path(System.getProperty("test.build.data", "."), "TestJobControlData");
-        Path indir = new Path(rootDataDir, "indir");
-        Path outdir_1 = new Path(rootDataDir, "outdir_1");
-        Path outdir_2 = new Path(rootDataDir, "outdir_2");
-        Path outdir_3 = new Path(rootDataDir, "outdir_3");
-        Path outdir_4 = new Path(rootDataDir, "outdir_4");
-
-        cleanData(fs, indir);
-        generateData(fs, indir);
-
-        cleanData(fs, outdir_1);
-        cleanData(fs, outdir_2);
-        cleanData(fs, outdir_3);
-        cleanData(fs, outdir_4);
-
-        ArrayList dependingJobs = null;
-
-        ArrayList inPaths_1 = new ArrayList();
-        inPaths_1.add(indir);
-        JobConf jobConf_1 = createCopyJob(inPaths_1, outdir_1);
-        Job job_1 = new Job(jobConf_1, dependingJobs);
-        ArrayList inPaths_2 = new ArrayList();
-        inPaths_2.add(indir);
-        JobConf jobConf_2 = createCopyJob(inPaths_2, outdir_2);
-        Job job_2 = new Job(jobConf_2, dependingJobs);
-
-        ArrayList inPaths_3 = new ArrayList();
-        inPaths_3.add(outdir_1);
-        inPaths_3.add(outdir_2);
-        JobConf jobConf_3 = createCopyJob(inPaths_3, outdir_3);
-        dependingJobs = new ArrayList();
-        dependingJobs.add(job_1);
-        dependingJobs.add(job_2);
-        Job job_3 = new Job(jobConf_3, dependingJobs);
-
-        ArrayList inPaths_4 = new ArrayList();
-        inPaths_4.add(outdir_3);
-        JobConf jobConf_4 = createCopyJob(inPaths_4, outdir_4);
-        dependingJobs = new ArrayList();
-        dependingJobs.add(job_3);
-        Job job_4 = new Job(jobConf_4, dependingJobs);
-
-        JobControl theControl = new JobControl("Test");
-        theControl.addJob(job_1);
-        theControl.addJob(job_2);
-        theControl.addJob(job_3);
-        theControl.addJob(job_4);
-
-        Thread theController = new Thread(theControl);
-        theController.start();
-        while (!theControl.allFinished()) {
-
-            System.out.println("Jobs in waiting state: "
-                    + theControl.getWaitingJobs().size());
-            System.out.println("Jobs in ready state: "
-                    + theControl.getReadyJobs().size());
-            System.out.println("Jobs in running state: "
-                    + theControl.getRunningJobs().size());
-            System.out.println("Jobs in success state: "
-                    + theControl.getSuccessfulJobs().size());
-            System.out.println("Jobs in failed state: "
-                    + theControl.getFailedJobs().size());
-            System.out.println("\n");
-
-            try {
-                Thread.sleep(5000);
-            } catch (Exception e) {
-
-            }
-        }
-        System.out.println("Jobs are all done???");
-        System.out.println("Jobs in waiting state: "
-                + theControl.getWaitingJobs().size());
-        System.out.println("Jobs in ready state: "
-                + theControl.getReadyJobs().size());
-        System.out.println("Jobs in running state: "
-                + theControl.getRunningJobs().size());
-        System.out.println("Jobs in success state: "
-                + theControl.getSuccessfulJobs().size());
-        System.out.println("Jobs in failed state: "
-                + theControl.getFailedJobs().size());
-        System.out.println("\n");
-        
-        if (job_1.getState() != Job.FAILED && 
-                job_1.getState() != Job.DEPENDENT_FAILED && 
-                job_1.getState() != Job.SUCCESS) {
+  private static NumberFormat idFormat = NumberFormat.getInstance();
+  static {
+    idFormat.setMinimumIntegerDigits(4);
+    idFormat.setGroupingUsed(false);
+  }
+
+  static private Random rand = new Random();
+
+  private static void cleanData(FileSystem fs, Path dirPath)
+    throws IOException {
+    fs.delete(dirPath);
+  }
+
+  private static String generateRandomWord() {
+    return idFormat.format(rand.nextLong());
+  }
+
+  private static String generateRandomLine() {
+    long r = rand.nextLong() % 7;
+    long n = r + 20;
+    StringBuffer sb = new StringBuffer();
+    for (int i = 0; i < n; i++) {
+      sb.append(generateRandomWord()).append(" ");
+    }
+    sb.append("\n");
+    return sb.toString();
+  }
+
+  private static void generateData(FileSystem fs, Path dirPath)
+    throws IOException {
+    FSDataOutputStream out = fs.create(new Path(dirPath, "data.txt"));
+    for (int i = 0; i < 100000; i++) {
+      String line = TestJobControl.generateRandomLine();
+      out.write(line.getBytes("UTF-8"));
+    }
+    out.close();
+  }
+
+  public static class DataCopy extends MapReduceBase implements Mapper,
+                                                                Reducer {
+    public void map(WritableComparable key, Writable value,
+                    OutputCollector output, Reporter reporter) throws IOException {
+      output.collect(new Text(key.toString()), value);
+    }
+
+    public void reduce(WritableComparable key, Iterator values,
+                       OutputCollector output, Reporter reporter) throws IOException {
+      Text dumbKey = new Text("");
+      while (values.hasNext()) {
+        Text data = (Text) values.next();
+        output.collect(dumbKey, data);
+      }
+    }
+  }
+
+  private static JobConf createCopyJob(ArrayList indirs, Path outdir)
+    throws Exception {
+
+    Configuration defaults = new Configuration();
+    JobConf theJob = new JobConf(defaults, TestJobControl.class);
+    theJob.setJobName("DataMoveJob");
+
+    theJob.setInputPath((Path) indirs.get(0));
+    if (indirs.size() > 1) {
+      for (int i = 1; i < indirs.size(); i++) {
+        theJob.addInputPath((Path) indirs.get(i));
+      }
+    }
+    theJob.setMapperClass(DataCopy.class);
+    theJob.setOutputPath(outdir);
+    theJob.setOutputKeyClass(Text.class);
+    theJob.setOutputValueClass(Text.class);
+    theJob.setReducerClass(DataCopy.class);
+    theJob.setNumMapTasks(12);
+    theJob.setNumReduceTasks(4);
+    return theJob;
+  }
+
+  /**
+   * This is the main function for testing the JobControl class.
+   * It first cleans all the dirs it will use. Then it generates some random text
+   * data in TestJobControlData/indir. Then it creates 4 jobs:
+   *      Job 1: copy data from indir to outdir_1
+   *      Job 2: copy data from indir to outdir_2
+   *      Job 3: copy data from outdir_1 and outdir_2 to outdir_3
+   *      Job 4: copy data from outdir_3 to outdir_4
+   * Jobs 1 and 2 have no dependencies. Job 3 depends on jobs 1 and 2, and
+   * job 4 depends on job 3.
+   * 
+   * It then creates a JobControl object, adds the 4 jobs to it, and finally
+   * starts a thread to run the JobControl object while monitoring and reporting
+   * the job states.
+   */
+  public static void doJobControlTest() throws Exception {
+        
+    Configuration defaults = new Configuration();
+    FileSystem fs = FileSystem.get(defaults);
+    Path rootDataDir = new Path(System.getProperty("test.build.data", "."), "TestJobControlData");
+    Path indir = new Path(rootDataDir, "indir");
+    Path outdir_1 = new Path(rootDataDir, "outdir_1");
+    Path outdir_2 = new Path(rootDataDir, "outdir_2");
+    Path outdir_3 = new Path(rootDataDir, "outdir_3");
+    Path outdir_4 = new Path(rootDataDir, "outdir_4");
+
+    cleanData(fs, indir);
+    generateData(fs, indir);
+
+    cleanData(fs, outdir_1);
+    cleanData(fs, outdir_2);
+    cleanData(fs, outdir_3);
+    cleanData(fs, outdir_4);
+
+    ArrayList dependingJobs = null;
+
+    ArrayList inPaths_1 = new ArrayList();
+    inPaths_1.add(indir);
+    JobConf jobConf_1 = createCopyJob(inPaths_1, outdir_1);
+    Job job_1 = new Job(jobConf_1, dependingJobs);
+    ArrayList inPaths_2 = new ArrayList();
+    inPaths_2.add(indir);
+    JobConf jobConf_2 = createCopyJob(inPaths_2, outdir_2);
+    Job job_2 = new Job(jobConf_2, dependingJobs);
+
+    ArrayList inPaths_3 = new ArrayList();
+    inPaths_3.add(outdir_1);
+    inPaths_3.add(outdir_2);
+    JobConf jobConf_3 = createCopyJob(inPaths_3, outdir_3);
+    dependingJobs = new ArrayList();
+    dependingJobs.add(job_1);
+    dependingJobs.add(job_2);
+    Job job_3 = new Job(jobConf_3, dependingJobs);
+
+    ArrayList inPaths_4 = new ArrayList();
+    inPaths_4.add(outdir_3);
+    JobConf jobConf_4 = createCopyJob(inPaths_4, outdir_4);
+    dependingJobs = new ArrayList();
+    dependingJobs.add(job_3);
+    Job job_4 = new Job(jobConf_4, dependingJobs);
+
+    JobControl theControl = new JobControl("Test");
+    theControl.addJob(job_1);
+    theControl.addJob(job_2);
+    theControl.addJob(job_3);
+    theControl.addJob(job_4);
+
+    Thread theController = new Thread(theControl);
+    theController.start();
+    while (!theControl.allFinished()) {
+
+      System.out.println("Jobs in waiting state: "
+                         + theControl.getWaitingJobs().size());
+      System.out.println("Jobs in ready state: "
+                         + theControl.getReadyJobs().size());
+      System.out.println("Jobs in running state: "
+                         + theControl.getRunningJobs().size());
+      System.out.println("Jobs in success state: "
+                         + theControl.getSuccessfulJobs().size());
+      System.out.println("Jobs in failed state: "
+                         + theControl.getFailedJobs().size());
+      System.out.println("\n");
+
+      try {
+        Thread.sleep(5000);
+      } catch (Exception e) {
+
+      }
+    }
+    System.out.println("Jobs are all done???");
+    System.out.println("Jobs in waiting state: "
+                       + theControl.getWaitingJobs().size());
+    System.out.println("Jobs in ready state: "
+                       + theControl.getReadyJobs().size());
+    System.out.println("Jobs in running state: "
+                       + theControl.getRunningJobs().size());
+    System.out.println("Jobs in success state: "
+                       + theControl.getSuccessfulJobs().size());
+    System.out.println("Jobs in failed state: "
+                       + theControl.getFailedJobs().size());
+    System.out.println("\n");
+        
+    if (job_1.getState() != Job.FAILED && 
+        job_1.getState() != Job.DEPENDENT_FAILED && 
+        job_1.getState() != Job.SUCCESS) {
            
-                String states = "job_1:  " + job_1.getState() + "\n";
-                throw new Exception("The state of job_1 is not in a complete state\n" + states);
-        }
-        
-        if (job_2.getState() != Job.FAILED &&
-                job_2.getState() != Job.DEPENDENT_FAILED && 
-                job_2.getState() != Job.SUCCESS) {
-           
-                String states = "job_2:  " + job_2.getState() + "\n";
-                throw new Exception("The state of job_2 is not in a complete state\n" + states);
-        }
-        
-        if (job_3.getState() != Job.FAILED && 
-                job_3.getState() != Job.DEPENDENT_FAILED && 
-                job_3.getState() != Job.SUCCESS) {
-           
-                String states = "job_3:  " + job_3.getState() + "\n";
-                throw new Exception("The state of job_3 is not in a complete state\n" + states);
-        }
-        if (job_4.getState() != Job.FAILED && 
-                job_4.getState() != Job.DEPENDENT_FAILED && 
-                job_4.getState() != Job.SUCCESS) {
+      String states = "job_1:  " + job_1.getState() + "\n";
+      throw new Exception("The state of job_1 is not in a complete state\n" + states);
+    }
+        
+    if (job_2.getState() != Job.FAILED &&
+        job_2.getState() != Job.DEPENDENT_FAILED && 
+        job_2.getState() != Job.SUCCESS) {
            
-                String states = "job_4:  " + job_4.getState() + "\n";
-                throw new Exception("The state of job_4 is not in a complete state\n" + states);
-        }
-        
-        if (job_1.getState() == Job.FAILED || 
-                job_2.getState() == Job.FAILED ||
-                job_1.getState() == Job.DEPENDENT_FAILED || 
-                job_2.getState() == Job.DEPENDENT_FAILED) {
-            if (job_3.getState() != Job.DEPENDENT_FAILED) {
-                String states = "job_1:  " + job_1.getState() + "\n";
-                states = "job_2:  " + job_2.getState() + "\n";
-                states = "job_3:  " + job_3.getState() + "\n";
-                states = "job_4:  " + job_4.getState() + "\n";
-                throw new Exception("The states of jobs 1, 2, 3, 4 are not consistent\n" + states);
-            }
-        }
-        if (job_3.getState() == Job.FAILED || 
-                job_3.getState() == Job.DEPENDENT_FAILED) {
-            if (job_4.getState() != Job.DEPENDENT_FAILED) {
-                String states = "job_3:  " + job_3.getState() + "\n";
-                states = "job_4:  " + job_4.getState() + "\n";
-                throw new Exception("The states of jobs 3, 4 are not consistent\n" + states);
-            }
-        }
+      String states = "job_2:  " + job_2.getState() + "\n";
+      throw new Exception("The state of job_2 is not in a complete state\n" + states);
+    }
         
-        theControl.stop();
+    if (job_3.getState() != Job.FAILED && 
+        job_3.getState() != Job.DEPENDENT_FAILED && 
+        job_3.getState() != Job.SUCCESS) {
+           
+      String states = "job_3:  " + job_3.getState() + "\n";
+      throw new Exception("The state of job_3 is not in a complete state\n" + states);
     }
-
-    public void testJobControl() throws Exception {
-        doJobControlTest();
+    if (job_4.getState() != Job.FAILED && 
+        job_4.getState() != Job.DEPENDENT_FAILED && 
+        job_4.getState() != Job.SUCCESS) {
+           
+      String states = "job_4:  " + job_4.getState() + "\n";
+      throw new Exception("The state of job_4 is not in a complete state\n" + states);
     }
+        
+    if (job_1.getState() == Job.FAILED || 
+        job_2.getState() == Job.FAILED ||
+        job_1.getState() == Job.DEPENDENT_FAILED || 
+        job_2.getState() == Job.DEPENDENT_FAILED) {
+      if (job_3.getState() != Job.DEPENDENT_FAILED) {
+        String states = "job_1:  " + job_1.getState() + "\n";
+        states = "job_2:  " + job_2.getState() + "\n";
+        states = "job_3:  " + job_3.getState() + "\n";
+        states = "job_4:  " + job_4.getState() + "\n";
+        throw new Exception("The states of jobs 1, 2, 3, 4 are not consistent\n" + states);
+      }
+    }
+    if (job_3.getState() == Job.FAILED || 
+        job_3.getState() == Job.DEPENDENT_FAILED) {
+      if (job_4.getState() != Job.DEPENDENT_FAILED) {
+        String states = "job_3:  " + job_3.getState() + "\n";
+        states = "job_4:  " + job_4.getState() + "\n";
+        throw new Exception("The states of jobs 3, 4 are not consistent\n" + states);
+      }
+    }
+        
+    theControl.stop();
+  }
+
+  public void testJobControl() throws Exception {
+    doJobControlTest();
+  }
     
-    public static void main(String[] args) {
-        TestJobControl test = new TestJobControl();
-        try {
-            test.testJobControl();
-        }
-        catch (Exception e) {
-            e.printStackTrace();
-        }
+  public static void main(String[] args) {
+    TestJobControl test = new TestJobControl();
+    try {
+      test.testJobControl();
+    }
+    catch (Exception e) {
+      e.printStackTrace();
     }
+  }
 }

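The reindented TestJobControl above exercises the jobcontrol API end to end: each Job wraps a JobConf, dependencies are declared by passing a list of prerequisite Job objects to the Job constructor, and a JobControl instance is run on its own thread and polled until every job finishes. A minimal sketch of that wiring, using only calls that appear in the diff (the three JobConfs are assumed to be configured elsewhere, e.g. by a helper like createCopyJob above):

import java.util.ArrayList;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapred.jobcontrol.JobControl;

public class JobControlSketch {

  /** Runs confC only after confA and confB, mirroring the job_3 wiring above. */
  public static void runChain(JobConf confA, JobConf confB, JobConf confC)
      throws Exception {
    Job jobA = new Job(confA, null);        // no prerequisite jobs
    Job jobB = new Job(confB, null);

    ArrayList deps = new ArrayList();       // jobC waits for jobA and jobB
    deps.add(jobA);
    deps.add(jobB);
    Job jobC = new Job(confC, deps);

    JobControl control = new JobControl("sketch");
    control.addJob(jobA);
    control.addJob(jobB);
    control.addJob(jobC);

    Thread runner = new Thread(control);    // JobControl is a Runnable
    runner.start();
    while (!control.allFinished()) {        // poll, as the test does
      Thread.sleep(5000);
    }
    control.stop();

    System.out.println("failed jobs: " + control.getFailedJobs().size());
  }
}
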
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/record/FromCpp.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/record/FromCpp.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/record/FromCpp.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/record/FromCpp.java Mon Apr 16 14:44:35 2007
@@ -31,92 +31,92 @@
  */
 public class FromCpp extends TestCase {
     
-    public FromCpp(String testName) {
-        super(testName);
-    }
+  public FromCpp(String testName) {
+    super(testName);
+  }
 
-    protected void setUp() throws Exception {
-    }
+  protected void setUp() throws Exception {
+  }
 
-    protected void tearDown() throws Exception {
-    }
+  protected void tearDown() throws Exception {
+  }
     
-    public void testBinary() {
-        File tmpfile;
-        try {
-            tmpfile = new File("/temp/hadooptmp.dat");
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("random text");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            FileInputStream istream = new FileInputStream(tmpfile);
-            BinaryRecordInput in = new BinaryRecordInput(istream);
-            RecRecord1 r2 = new RecRecord1();
-            r2.deserialize(in, "");
-            istream.close();
-            assertTrue(r1.equals(r2));
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        } 
-    }
+  public void testBinary() {
+    File tmpfile;
+    try {
+      tmpfile = new File("/temp/hadooptmp.dat");
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("random text");
+      r1.setBufferVal(new Buffer());
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      FileInputStream istream = new FileInputStream(tmpfile);
+      BinaryRecordInput in = new BinaryRecordInput(istream);
+      RecRecord1 r2 = new RecRecord1();
+      r2.deserialize(in, "");
+      istream.close();
+      assertTrue(r1.equals(r2));
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } 
+  }
     
-    public void testCsv() {
-        File tmpfile;
-        try {
-            tmpfile = new File("/temp/hadooptmp.txt");
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("random text");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            FileInputStream istream = new FileInputStream(tmpfile);
-            CsvRecordInput in = new CsvRecordInput(istream);
-            RecRecord1 r2 = new RecRecord1();
-            r2.deserialize(in, "");
-            istream.close();
-            assertTrue(r1.equals(r2));
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        } 
-    }
+  public void testCsv() {
+    File tmpfile;
+    try {
+      tmpfile = new File("/temp/hadooptmp.txt");
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("random text");
+      r1.setBufferVal(new Buffer());
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      FileInputStream istream = new FileInputStream(tmpfile);
+      CsvRecordInput in = new CsvRecordInput(istream);
+      RecRecord1 r2 = new RecRecord1();
+      r2.deserialize(in, "");
+      istream.close();
+      assertTrue(r1.equals(r2));
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } 
+  }
 
-    public void testXml() {
-        File tmpfile;
-        try {
-            tmpfile = new File("/temp/hadooptmp.xml");
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("random text");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            FileInputStream istream = new FileInputStream(tmpfile);
-            XmlRecordInput in = new XmlRecordInput(istream);
-            RecRecord1 r2 = new RecRecord1();
-            r2.deserialize(in, "");
-            istream.close();
-            assertTrue(r1.equals(r2));
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        } 
-    }
+  public void testXml() {
+    File tmpfile;
+    try {
+      tmpfile = new File("/temp/hadooptmp.xml");
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("random text");
+      r1.setBufferVal(new Buffer());
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      FileInputStream istream = new FileInputStream(tmpfile);
+      XmlRecordInput in = new XmlRecordInput(istream);
+      RecRecord1 r2 = new RecRecord1();
+      r2.deserialize(in, "");
+      istream.close();
+      assertTrue(r1.equals(r2));
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } 
+  }
 
 }

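FromCpp only reads: each test deserializes a record that the C++ half of the record I/O tests is expected to have written to /temp/hadooptmp.*, and compares it with a locally built RecRecord1. For reference, here is a minimal sketch of the matching write half in Java, using the same CsvRecordOutput and serialize() calls that appear in TestRecordIO below. The output path mirrors the fixture path the test reads, the field values are illustrative, and the package declaration is an assumption so that the generated RecRecord1 type resolves.

// Assumed to live in the same package as the record tests so that the
// generated RecRecord1 type resolves without an import.
package org.apache.hadoop.record;

import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.TreeMap;

public class WriteCsvFixture {
  public static void main(String[] args) throws Exception {
    RecRecord1 r1 = new RecRecord1();
    r1.setBoolVal(true);
    r1.setByteVal((byte) 0x66);
    r1.setFloatVal(3.145F);
    r1.setDoubleVal(1.5234);
    r1.setIntVal(4567);
    r1.setLongVal(0x5a5a5a5a5a5aL);
    r1.setStringVal("random text");
    r1.setBufferVal(new Buffer());
    r1.setVectorVal(new ArrayList<String>());
    r1.setMapVal(new TreeMap<String, String>());

    FileOutputStream ostream = new FileOutputStream("/temp/hadooptmp.txt");
    CsvRecordOutput out = new CsvRecordOutput(ostream);
    r1.serialize(out, "");                  // empty tag, as in the tests
    ostream.close();
  }
}
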
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/record/RecordBench.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/record/RecordBench.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/record/RecordBench.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/record/RecordBench.java Mon Apr 16 14:44:35 2007
@@ -90,8 +90,8 @@
     Method init = null;
     try {
       init = RecordBench.class.getDeclaredMethod("init"+
-          toCamelCase(type) + "s",
-          new Class[] {Record[].class});
+                                                 toCamelCase(type) + "s",
+                                                 new Class[] {Record[].class});
     } catch (NoSuchMethodException ex) {
       throw new RuntimeException(ex);
     }
@@ -108,7 +108,7 @@
   }
   
   private static void runBinaryBench(String type, int numRecords, Times times)
-  throws IOException {
+    throws IOException {
     Record[] records = makeArray(type, numRecords, times);
     ByteArrayOutputStream bout = new ByteArrayOutputStream();
     BinaryRecordOutput rout = new BinaryRecordOutput(bout);
@@ -154,7 +154,7 @@
   }
   
   private static void runCsvBench(String type, int numRecords, Times times)
-  throws IOException {
+    throws IOException {
     Record[] records = makeArray(type, numRecords, times);
     ByteArrayOutputStream bout = new ByteArrayOutputStream();
     CsvRecordOutput rout = new CsvRecordOutput(bout);
@@ -182,7 +182,7 @@
   }
   
   private static void runXmlBench(String type, int numRecords, Times times)
-  throws IOException {
+    throws IOException {
     Record[] records = makeArray(type, numRecords, times);
     ByteArrayOutputStream bout = new ByteArrayOutputStream();
     XmlRecordOutput rout = new XmlRecordOutput(bout);
@@ -214,34 +214,34 @@
   }
 
   private static void printTimes(String type,
-      String format,
-      int numRecords,
-      Times times) {
+                                 String format,
+                                 int numRecords,
+                                 Times times) {
     System.out.println("Type: " + type + " Format: " + format +
-        " #Records: "+numRecords);
+                       " #Records: "+numRecords);
     if (times.init != 0) {
       System.out.println("Initialization Time (Per record) : "+
-          times.init/numRecords + " Nanoseconds");
+                         times.init/numRecords + " Nanoseconds");
     }
     
     if (times.serialize != 0) {
       System.out.println("Serialization Time (Per Record) : "+
-          times.serialize/numRecords + " Nanoseconds");
+                         times.serialize/numRecords + " Nanoseconds");
     }
     
     if (times.deserialize != 0) {
       System.out.println("Deserialization Time (Per Record) : "+
-          times.deserialize/numRecords + " Nanoseconds");
+                         times.deserialize/numRecords + " Nanoseconds");
     }
     
     if (times.write != 0) {
       System.out.println("Write Time (Per Record) : "+
-          times.write/numRecords + " Nanoseconds");
+                         times.write/numRecords + " Nanoseconds");
     }
     
     if (times.readFields != 0) {
       System.out.println("ReadFields Time (Per Record) : "+
-          times.readFields/numRecords + " Nanoseconds");
+                         times.readFields/numRecords + " Nanoseconds");
     }
     
     System.out.println();
@@ -257,7 +257,7 @@
   
   private static void exitOnError() {
     String usage = "RecordBench {buffer|string|int}"+
-        " {binary|csv|xml} <numRecords>";
+      " {binary|csv|xml} <numRecords>";
     System.out.println(usage);
     System.exit(1);
   }
@@ -279,9 +279,9 @@
     
     Method bench = null;
     try {
-    bench = RecordBench.class.getDeclaredMethod("run"+
-        toCamelCase(format) + "Bench",
-        new Class[] {String.class, Integer.TYPE, Times.class});
+      bench = RecordBench.class.getDeclaredMethod("run"+
+                                                  toCamelCase(format) + "Bench",
+                                                  new Class[] {String.class, Integer.TYPE, Times.class});
     } catch (NoSuchMethodException ex) {
       ex.printStackTrace();
       exitOnError();

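The RecordBench hunks above only re-wrap long argument lists, but the lines they touch are the class's reflection dispatch: a method name is assembled from the type/format strings given on the command line and resolved with getDeclaredMethod before being invoked. A small self-contained sketch of that pattern follows; the class and method names here are illustrative, not RecordBench's own.

import java.lang.reflect.Method;

public class ReflectDispatchSketch {

  // One of several candidate benchmarks selected by name at run time.
  public static void runCsvBench(int numRecords) {
    System.out.println("csv bench over " + numRecords + " records");
  }

  public static void main(String[] args) throws Exception {
    String format = "csv";                                       // e.g. taken from args
    String name = "run" + Character.toUpperCase(format.charAt(0))
        + format.substring(1) + "Bench";                         // -> "runCsvBench"
    Method bench = ReflectDispatchSketch.class.getDeclaredMethod(
        name, new Class[] {Integer.TYPE});
    bench.invoke(null, new Object[] {Integer.valueOf(100000)});  // static invoke
  }
}
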
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/record/TestRecordIO.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/record/TestRecordIO.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/record/TestRecordIO.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/record/TestRecordIO.java Mon Apr 16 14:44:35 2007
@@ -32,170 +32,170 @@
  */
 public class TestRecordIO extends TestCase {
     
-    public TestRecordIO(String testName) {
-        super(testName);
-    }
+  public TestRecordIO(String testName) {
+    super(testName);
+  }
 
-    protected void setUp() throws Exception {
-    }
+  protected void setUp() throws Exception {
+  }
 
-    protected void tearDown() throws Exception {
-    }
+  protected void tearDown() throws Exception {
+  }
     
-    public void testBinary() {
-        File tmpfile;
-        try {
-            tmpfile = File.createTempFile("hadooprec", ".dat");
-            FileOutputStream ostream = new FileOutputStream(tmpfile);
-            BinaryRecordOutput out = new BinaryRecordOutput(ostream);
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(-4567);
-            r1.setLongVal(-2367L);
-            r1.setStringVal("random text");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            RecRecord0 r0 = new RecRecord0();
-            r0.setStringVal("other random text");
-            r1.setRecordVal(r0);
-            r1.serialize(out, "");
-            ostream.close();
-            FileInputStream istream = new FileInputStream(tmpfile);
-            BinaryRecordInput in = new BinaryRecordInput(istream);
-            RecRecord1 r2 = new RecRecord1();
-            r2.deserialize(in, "");
-            istream.close();
-            tmpfile.delete();
-            assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        } 
-    }
+  public void testBinary() {
+    File tmpfile;
+    try {
+      tmpfile = File.createTempFile("hadooprec", ".dat");
+      FileOutputStream ostream = new FileOutputStream(tmpfile);
+      BinaryRecordOutput out = new BinaryRecordOutput(ostream);
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(-4567);
+      r1.setLongVal(-2367L);
+      r1.setStringVal("random text");
+      r1.setBufferVal(new Buffer());
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      RecRecord0 r0 = new RecRecord0();
+      r0.setStringVal("other random text");
+      r1.setRecordVal(r0);
+      r1.serialize(out, "");
+      ostream.close();
+      FileInputStream istream = new FileInputStream(tmpfile);
+      BinaryRecordInput in = new BinaryRecordInput(istream);
+      RecRecord1 r2 = new RecRecord1();
+      r2.deserialize(in, "");
+      istream.close();
+      tmpfile.delete();
+      assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } 
+  }
     
-    public void testCsv() {
-        File tmpfile;
-        try {
-            tmpfile = File.createTempFile("hadooprec", ".txt");
-            FileOutputStream ostream = new FileOutputStream(tmpfile);
-            CsvRecordOutput out = new CsvRecordOutput(ostream);
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("random text");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            RecRecord0 r0 = new RecRecord0();
-            r0.setStringVal("other random text");
-            r1.setRecordVal(r0);
-            r1.serialize(out, "");
-            ostream.close();
-            FileInputStream istream = new FileInputStream(tmpfile);
-            CsvRecordInput in = new CsvRecordInput(istream);
-            RecRecord1 r2 = new RecRecord1();
-            r2.deserialize(in, "");
-            istream.close();
-            tmpfile.delete();
-            assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
+  public void testCsv() {
+    File tmpfile;
+    try {
+      tmpfile = File.createTempFile("hadooprec", ".txt");
+      FileOutputStream ostream = new FileOutputStream(tmpfile);
+      CsvRecordOutput out = new CsvRecordOutput(ostream);
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("random text");
+      r1.setBufferVal(new Buffer());
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      RecRecord0 r0 = new RecRecord0();
+      r0.setStringVal("other random text");
+      r1.setRecordVal(r0);
+      r1.serialize(out, "");
+      ostream.close();
+      FileInputStream istream = new FileInputStream(tmpfile);
+      CsvRecordInput in = new CsvRecordInput(istream);
+      RecRecord1 r2 = new RecRecord1();
+      r2.deserialize(in, "");
+      istream.close();
+      tmpfile.delete();
+      assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
             
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        }
+    } catch (IOException ex) {
+      ex.printStackTrace();
     }
+  }
 
-    public void testToString() {
-      try {
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("random text");
-            byte[] barr = new byte[256];
-            for (int idx = 0; idx < 256; idx++) {
-              barr[idx] = (byte) idx;
-            }
-            r1.setBufferVal(new Buffer(barr));
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            RecRecord0 r0 = new RecRecord0();
-            r0.setStringVal("other random text");
-            r1.setRecordVal(r0);
-            System.err.println("Illustrating toString bug"+r1.toString());
-            System.err.println("Illustrating toString bug"+r1.toString());
-        } catch (Throwable ex) {
-            assertTrue("Record.toString cannot be invoked twice in succession."+
-                "This bug has been fixed in the latest version.", false);
-        }
-    }
-    
-    public void testXml() {
-        File tmpfile;
-        try {
-            tmpfile = File.createTempFile("hadooprec", ".xml");
-            FileOutputStream ostream = new FileOutputStream(tmpfile);
-            XmlRecordOutput out = new XmlRecordOutput(ostream);
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("ran\002dom &lt; %text<&more\uffff");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            RecRecord0 r0 = new RecRecord0();
-            r0.setStringVal("other %rando\007m &amp; >&more text");
-            r1.setRecordVal(r0);
-            r1.serialize(out, "");
-            ostream.close();
-            FileInputStream istream = new FileInputStream(tmpfile);
-            XmlRecordInput in = new XmlRecordInput(istream);
-            RecRecord1 r2 = new RecRecord1();
-            r2.deserialize(in, "");
-            istream.close();
-            tmpfile.delete();
-            assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        } 
+  public void testToString() {
+    try {
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("random text");
+      byte[] barr = new byte[256];
+      for (int idx = 0; idx < 256; idx++) {
+        barr[idx] = (byte) idx;
+      }
+      r1.setBufferVal(new Buffer(barr));
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      RecRecord0 r0 = new RecRecord0();
+      r0.setStringVal("other random text");
+      r1.setRecordVal(r0);
+      System.err.println("Illustrating toString bug"+r1.toString());
+      System.err.println("Illustrating toString bug"+r1.toString());
+    } catch (Throwable ex) {
+      assertTrue("Record.toString cannot be invoked twice in succession."+
+                 "This bug has been fixed in the latest version.", false);
     }
+  }
     
-    public void testCloneable() {
+  public void testXml() {
+    File tmpfile;
+    try {
+      tmpfile = File.createTempFile("hadooprec", ".xml");
+      FileOutputStream ostream = new FileOutputStream(tmpfile);
+      XmlRecordOutput out = new XmlRecordOutput(ostream);
       RecRecord1 r1 = new RecRecord1();
       r1.setBoolVal(true);
       r1.setByteVal((byte)0x66);
       r1.setFloatVal(3.145F);
       r1.setDoubleVal(1.5234);
-      r1.setIntVal(-4567);
-      r1.setLongVal(-2367L);
-      r1.setStringVal("random text");
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("ran\002dom &lt; %text<&more\uffff");
       r1.setBufferVal(new Buffer());
       r1.setVectorVal(new ArrayList<String>());
       r1.setMapVal(new TreeMap<String,String>());
       RecRecord0 r0 = new RecRecord0();
-      r0.setStringVal("other random text");
+      r0.setStringVal("other %rando\007m &amp; >&more text");
       r1.setRecordVal(r0);
-      try {
-        RecRecord1 r2 = (RecRecord1) r1.clone();
-        assertTrue("Cloneable semantics violated. r1==r2", r1 != r2);
-        assertTrue("Cloneable semantics violated. r1.getClass() != r2.getClass()",
-            r1.getClass() == r2.getClass());
-        assertTrue("Cloneable semantics violated. !r2.equals(r1)", r2.equals(r1));
-      } catch (final CloneNotSupportedException ex) {
-        ex.printStackTrace();
-      }
+      r1.serialize(out, "");
+      ostream.close();
+      FileInputStream istream = new FileInputStream(tmpfile);
+      XmlRecordInput in = new XmlRecordInput(istream);
+      RecRecord1 r2 = new RecRecord1();
+      r2.deserialize(in, "");
+      istream.close();
+      tmpfile.delete();
+      assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } 
+  }
+    
+  public void testCloneable() {
+    RecRecord1 r1 = new RecRecord1();
+    r1.setBoolVal(true);
+    r1.setByteVal((byte)0x66);
+    r1.setFloatVal(3.145F);
+    r1.setDoubleVal(1.5234);
+    r1.setIntVal(-4567);
+    r1.setLongVal(-2367L);
+    r1.setStringVal("random text");
+    r1.setBufferVal(new Buffer());
+    r1.setVectorVal(new ArrayList<String>());
+    r1.setMapVal(new TreeMap<String,String>());
+    RecRecord0 r0 = new RecRecord0();
+    r0.setStringVal("other random text");
+    r1.setRecordVal(r0);
+    try {
+      RecRecord1 r2 = (RecRecord1) r1.clone();
+      assertTrue("Cloneable semantics violated. r1==r2", r1 != r2);
+      assertTrue("Cloneable semantics violated. r1.getClass() != r2.getClass()",
+                 r1.getClass() == r2.getClass());
+      assertTrue("Cloneable semantics violated. !r2.equals(r1)", r2.equals(r1));
+    } catch (final CloneNotSupportedException ex) {
+      ex.printStackTrace();
     }
+  }
 }

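Each round-trip test in TestRecordIO above writes a record to a temp file with one of the three RecordOutput implementations and reads it back with the matching RecordInput. The same check can be made without touching disk; below is a minimal sketch using the binary format and an in-memory buffer. The package declaration is an assumption so that the generated RecRecord1 type resolves.

// Assumed to live in the same package as the record tests.
package org.apache.hadoop.record;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

public class InMemoryRoundTrip {

  /** Serializes r1 to a byte array, reads it back, and compares the copies. */
  public static boolean roundTripsCleanly(RecRecord1 r1) throws Exception {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    BinaryRecordOutput out = new BinaryRecordOutput(bout);
    r1.serialize(out, "");                      // write the record

    ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
    BinaryRecordInput in = new BinaryRecordInput(bin);
    RecRecord1 r2 = new RecRecord1();
    r2.deserialize(in, "");                     // read it back
    return r1.equals(r2);
  }
}
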
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/record/TestRecordMR.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/record/TestRecordMR.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/record/TestRecordMR.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/record/TestRecordMR.java Mon Apr 16 14:44:35 2007
@@ -62,392 +62,392 @@
  *
  **********************************************************/
 public class TestRecordMR extends TestCase {
-    /**
-     * Modified to make it a junit test.
-     * The RandomGen Job does the actual work of creating
-     * a huge file of assorted numbers.  It receives instructions
-     * as to how many times each number should be counted.  Then
-     * it emits those numbers in a crazy order.
-     *
-     * The map() function takes a key/val pair that describes
-     * a value-to-be-emitted (the key) and how many times it 
-     * should be emitted (the value), aka "numtimes".  map() then
-     * emits a series of intermediate key/val pairs.  It emits
-     * 'numtimes' of these.  The key is a random number and the
-     * value is the 'value-to-be-emitted'.
-     *
-     * The system collates and merges these pairs according to
-     * the random number.  reduce() function takes in a key/value
-     * pair that consists of a crazy random number and a series
-     * of values that should be emitted.  The random number key
-     * is now dropped, and reduce() emits a pair for every intermediate value.
-     * The emitted key is an intermediate value.  The emitted value
-     * is just a blank string.  Thus, we've created a huge file
-     * of numbers in random order, but where each number appears
-     * as many times as we were instructed.
-     */
-    static public class RandomGenMapper implements Mapper {
-        Random r = new Random();
-        public void configure(JobConf job) {
-        }
+  /**
+   * Modified to make it a junit test.
+   * The RandomGen Job does the actual work of creating
+   * a huge file of assorted numbers.  It receives instructions
+   * as to how many times each number should be counted.  Then
+   * it emits those numbers in a crazy order.
+   *
+   * The map() function takes a key/val pair that describes
+   * a value-to-be-emitted (the key) and how many times it 
+   * should be emitted (the value), aka "numtimes".  map() then
+   * emits a series of intermediate key/val pairs.  It emits
+   * 'numtimes' of these.  The key is a random number and the
+   * value is the 'value-to-be-emitted'.
+   *
+   * The system collates and merges these pairs according to
+   * the random number.  reduce() function takes in a key/value
+   * pair that consists of a crazy random number and a series
+   * of values that should be emitted.  The random number key
+   * is now dropped, and reduce() emits a pair for every intermediate value.
+   * The emitted key is an intermediate value.  The emitted value
+   * is just a blank string.  Thus, we've created a huge file
+   * of numbers in random order, but where each number appears
+   * as many times as we were instructed.
+   */
+  static public class RandomGenMapper implements Mapper {
+    Random r = new Random();
+    public void configure(JobConf job) {
+    }
 
-        public void map(WritableComparable key, Writable val, OutputCollector out, Reporter reporter) throws IOException {
-            int randomVal = ((RecInt) key).getData();
-            int randomCount = ((RecInt) val).getData();
-
-            for (int i = 0; i < randomCount; i++) {
-                out.collect(new RecInt(Math.abs(r.nextInt())),
-                        new RecString(Integer.toString(randomVal)));
-            }
-        }
-        public void close() {
-        }
+    public void map(WritableComparable key, Writable val, OutputCollector out, Reporter reporter) throws IOException {
+      int randomVal = ((RecInt) key).getData();
+      int randomCount = ((RecInt) val).getData();
+
+      for (int i = 0; i < randomCount; i++) {
+        out.collect(new RecInt(Math.abs(r.nextInt())),
+                    new RecString(Integer.toString(randomVal)));
+      }
+    }
+    public void close() {
+    }
+  }
+  /**
+   */
+  static public class RandomGenReducer implements Reducer {
+    public void configure(JobConf job) {
     }
-    /**
-     */
-    static public class RandomGenReducer implements Reducer {
-        public void configure(JobConf job) {
-        }
 
-        public void reduce(WritableComparable key,
-                Iterator it,
-                OutputCollector out,
-                Reporter reporter)
-                throws IOException {
-            int keyint = ((RecInt) key).getData();
-            while (it.hasNext()) {
-                String val = ((RecString) it.next()).getData();
-                out.collect(new RecInt(Integer.parseInt(val)),
-                        new RecString(""));
-            }
-        }
-        public void close() {
-        }
+    public void reduce(WritableComparable key,
+                       Iterator it,
+                       OutputCollector out,
+                       Reporter reporter)
+      throws IOException {
+      int keyint = ((RecInt) key).getData();
+      while (it.hasNext()) {
+        String val = ((RecString) it.next()).getData();
+        out.collect(new RecInt(Integer.parseInt(val)),
+                    new RecString(""));
+      }
     }
+    public void close() {
+    }
+  }
 
-    /**
-     * The RandomCheck Job does a lot of our work.  It takes
-     * in a num/string keyspace, and transforms it into a
-     * key/count(int) keyspace.
-     *
-     * The map() function just emits a num/1 pair for every
-     * num/string input pair.
-     *
-     * The reduce() function sums up all the 1s that were
-     * emitted for a single key.  It then emits the key/total
-     * pair.
-     *
-     * This is used to regenerate the random number "answer key".
-     * Each key here is a random number, and the count is the
-     * number of times the number was emitted.
-     */
-    static public class RandomCheckMapper implements Mapper {
-        public void configure(JobConf job) {
-        }
+  /**
+   * The RandomCheck Job does a lot of our work.  It takes
+   * in a num/string keyspace, and transforms it into a
+   * key/count(int) keyspace.
+   *
+   * The map() function just emits a num/1 pair for every
+   * num/string input pair.
+   *
+   * The reduce() function sums up all the 1s that were
+   * emitted for a single key.  It then emits the key/total
+   * pair.
+   *
+   * This is used to regenerate the random number "answer key".
+   * Each key here is a random number, and the count is the
+   * number of times the number was emitted.
+   */
+  static public class RandomCheckMapper implements Mapper {
+    public void configure(JobConf job) {
+    }
 
-        public void map(WritableComparable key, Writable val, OutputCollector out, Reporter reporter) throws IOException {
-            int pos = ((RecInt) key).getData();
-            String str = ((RecString) val).getData();
-            out.collect(new RecInt(pos), new RecString("1"));
-        }
-        public void close() {
-        }
+    public void map(WritableComparable key, Writable val, OutputCollector out, Reporter reporter) throws IOException {
+      int pos = ((RecInt) key).getData();
+      String str = ((RecString) val).getData();
+      out.collect(new RecInt(pos), new RecString("1"));
+    }
+    public void close() {
+    }
+  }
+  /**
+   */
+  static public class RandomCheckReducer implements Reducer {
+    public void configure(JobConf job) {
     }
-    /**
-     */
-    static public class RandomCheckReducer implements Reducer {
-        public void configure(JobConf job) {
-        }
         
-        public void reduce(WritableComparable key, Iterator it, OutputCollector out, Reporter reporter) throws IOException {
-            int keyint = ((RecInt) key).getData();
-            int count = 0;
-            while (it.hasNext()) {
-                it.next();
-                count++;
-            }
-            out.collect(new RecInt(keyint), new RecString(Integer.toString(count)));
-        }
-        public void close() {
-        }
+    public void reduce(WritableComparable key, Iterator it, OutputCollector out, Reporter reporter) throws IOException {
+      int keyint = ((RecInt) key).getData();
+      int count = 0;
+      while (it.hasNext()) {
+        it.next();
+        count++;
+      }
+      out.collect(new RecInt(keyint), new RecString(Integer.toString(count)));
+    }
+    public void close() {
     }
+  }
 
-    /**
-     * The Merge Job is a really simple one.  It takes in
-     * an int/int key-value set, and emits the same set.
-     * But it merges identical keys by adding their values.
-     *
-     * Thus, the map() function is just the identity function
-     * and reduce() just sums.  Nothing to see here!
-     */
-    static public class MergeMapper implements Mapper {
-        public void configure(JobConf job) {
-        }
+  /**
+   * The Merge Job is a really simple one.  It takes in
+   * an int/int key-value set, and emits the same set.
+   * But it merges identical keys by adding their values.
+   *
+   * Thus, the map() function is just the identity function
+   * and reduce() just sums.  Nothing to see here!
+   */
+  static public class MergeMapper implements Mapper {
+    public void configure(JobConf job) {
+    }
 
-        public void map(WritableComparable key, Writable val, OutputCollector out, Reporter reporter) throws IOException {
-            int keyint = ((RecInt) key).getData();
-            String valstr = ((RecString) val).getData();
-            out.collect(new RecInt(keyint), new RecInt(Integer.parseInt(valstr)));
-        }
-        public void close() {
-        }
+    public void map(WritableComparable key, Writable val, OutputCollector out, Reporter reporter) throws IOException {
+      int keyint = ((RecInt) key).getData();
+      String valstr = ((RecString) val).getData();
+      out.collect(new RecInt(keyint), new RecInt(Integer.parseInt(valstr)));
+    }
+    public void close() {
+    }
+  }
+  static public class MergeReducer implements Reducer {
+    public void configure(JobConf job) {
     }
-    static public class MergeReducer implements Reducer {
-        public void configure(JobConf job) {
-        }
         
-        public void reduce(WritableComparable key, Iterator it, OutputCollector out, Reporter reporter) throws IOException {
-            int keyint = ((RecInt) key).getData();
-            int total = 0;
-            while (it.hasNext()) {
-                total += ((RecInt) it.next()).getData();
-            }
-            out.collect(new RecInt(keyint), new RecInt(total));
-        }
-        public void close() {
-        }
+    public void reduce(WritableComparable key, Iterator it, OutputCollector out, Reporter reporter) throws IOException {
+      int keyint = ((RecInt) key).getData();
+      int total = 0;
+      while (it.hasNext()) {
+        total += ((RecInt) it.next()).getData();
+      }
+      out.collect(new RecInt(keyint), new RecInt(total));
+    }
+    public void close() {
     }
+  }
 
-    private static int range = 10;
-    private static int counts = 100;
-    private static Random r = new Random();
-    private static Configuration conf = new Configuration();
-
-    public void testMapred() throws Exception {
-	launch();
-    }
-
-    /**
-     * 
-     */
-    public static void launch() throws Exception {
-        //
-        // Generate distribution of ints.  This is the answer key.
-        //
-        int countsToGo = counts;
-        int dist[] = new int[range];
-        for (int i = 0; i < range; i++) {
-            double avgInts = (1.0 * countsToGo) / (range - i);
-            dist[i] = (int) Math.max(0, Math.round(avgInts + (Math.sqrt(avgInts) * r.nextGaussian())));
-            countsToGo -= dist[i];
-        }
-        if (countsToGo > 0) {
-            dist[dist.length-1] += countsToGo;
-        }
+  private static int range = 10;
+  private static int counts = 100;
+  private static Random r = new Random();
+  private static Configuration conf = new Configuration();
+
+  public void testMapred() throws Exception {
+    launch();
+  }
+
+  /**
+   * 
+   */
+  public static void launch() throws Exception {
+    //
+    // Generate distribution of ints.  This is the answer key.
+    //
+    int countsToGo = counts;
+    int dist[] = new int[range];
+    for (int i = 0; i < range; i++) {
+      double avgInts = (1.0 * countsToGo) / (range - i);
+      dist[i] = (int) Math.max(0, Math.round(avgInts + (Math.sqrt(avgInts) * r.nextGaussian())));
+      countsToGo -= dist[i];
+    }
+    if (countsToGo > 0) {
+      dist[dist.length-1] += countsToGo;
+    }
 
-        //
-        // Write the answer key to a file.  
-        //
-        FileSystem fs = FileSystem.get(conf);
-        Path testdir = new Path("mapred.loadtest");
-        if (!fs.mkdirs(testdir)) {
-          throw new IOException("Mkdirs failed to create directory " + testdir.toString());
-        }
+    //
+    // Write the answer key to a file.  
+    //
+    FileSystem fs = FileSystem.get(conf);
+    Path testdir = new Path("mapred.loadtest");
+    if (!fs.mkdirs(testdir)) {
+      throw new IOException("Mkdirs failed to create directory " + testdir.toString());
+    }
 
-        Path randomIns = new Path(testdir, "genins");
-        if (!fs.mkdirs(randomIns)) {
-          throw new IOException("Mkdirs failed to create directory " + randomIns.toString());
-        }
+    Path randomIns = new Path(testdir, "genins");
+    if (!fs.mkdirs(randomIns)) {
+      throw new IOException("Mkdirs failed to create directory " + randomIns.toString());
+    }
 
-        Path answerkey = new Path(randomIns, "answer.key");
-        SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, 
-            answerkey, RecInt.class, RecInt.class, 
-            CompressionType.NONE);
-        try {
-            for (int i = 0; i < range; i++) {
-                RecInt k = new RecInt();
-                RecInt v = new RecInt();
-                k.setData(i);
-                v.setData(dist[i]);
-                out.append(k, v);
-            }
-        } finally {
-            out.close();
-        }
+    Path answerkey = new Path(randomIns, "answer.key");
+    SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, 
+                                                        answerkey, RecInt.class, RecInt.class, 
+                                                        CompressionType.NONE);
+    try {
+      for (int i = 0; i < range; i++) {
+        RecInt k = new RecInt();
+        RecInt v = new RecInt();
+        k.setData(i);
+        v.setData(dist[i]);
+        out.append(k, v);
+      }
+    } finally {
+      out.close();
+    }
 
-        //
-        // Now we need to generate the random numbers according to
-        // the above distribution.
-        //
-        // We create a lot of map tasks, each of which takes at least
-        // one "line" of the distribution.  (That is, a certain number
-        // X is to be generated Y number of times.)
-        //
-        // A map task emits Y key/val pairs.  The val is X.  The key
-        // is a randomly-generated number.
-        //
-        // The reduce task gets its input sorted by key.  That is, sorted
-        // in random order.  It then emits a single line of text
-        // for the given values.  It does not emit the key.
-        //
-        // Because there's just one reduce task, we emit a single big
-        // file of random numbers.
-        //
-        Path randomOuts = new Path(testdir, "genouts");
-        fs.delete(randomOuts);
-
-
-        JobConf genJob = new JobConf(conf,TestRecordMR.class);
-        genJob.setInputPath(randomIns);
-        genJob.setInputKeyClass(RecInt.class);
-        genJob.setInputValueClass(RecInt.class);
-        genJob.setInputFormat(SequenceFileInputFormat.class);
-        genJob.setMapperClass(RandomGenMapper.class);
-
-        genJob.setOutputPath(randomOuts);
-        genJob.setOutputKeyClass(RecInt.class);
-        genJob.setOutputValueClass(RecString.class);
-        genJob.setOutputFormat(SequenceFileOutputFormat.class);
-        genJob.setReducerClass(RandomGenReducer.class);
-        genJob.setNumReduceTasks(1);
-
-        JobClient.runJob(genJob);
-
-        //
-        // Next, we read the big file in and regenerate the 
-        // original map.  It's split into a number of parts.
-        // (That number is 'intermediateReduces'.)
-        //
-        // We have many map tasks, each of which read at least one
-        // of the output numbers.  For each number read in, the
-        // map task emits a key/value pair where the key is the
-        // number and the value is "1".
-        //
-        // We have a single reduce task, which receives its input
-        // sorted by the key emitted above.  For each key, there will
-        // be a certain number of "1" values.  The reduce task sums
-        // these values to compute how many times the given key was
-        // emitted.
-        //
-        // The reduce task then emits a key/val pair where the key
-        // is the number in question, and the value is the number of
-        // times the key was emitted.  This is the same format as the
-        // original answer key (except that numbers emitted zero times
-        // will not appear in the regenerated key.)  The answer set
-        // is split into a number of pieces.  A final MapReduce job
-        // will merge them.
-        //
-        // There's not really a need to go to 10 reduces here 
-        // instead of 1.  But we want to test what happens when
-        // you have multiple reduces at once.
-        //
-        int intermediateReduces = 10;
-        Path intermediateOuts = new Path(testdir, "intermediateouts");
-        fs.delete(intermediateOuts);
-        JobConf checkJob = new JobConf(conf,TestRecordMR.class);
-        checkJob.setInputPath(randomOuts);
-        checkJob.setInputKeyClass(RecInt.class);
-        checkJob.setInputValueClass(RecString.class);
-        checkJob.setInputFormat(SequenceFileInputFormat.class);
-        checkJob.setMapperClass(RandomCheckMapper.class);
-
-        checkJob.setOutputPath(intermediateOuts);
-        checkJob.setOutputKeyClass(RecInt.class);
-        checkJob.setOutputValueClass(RecString.class);
-        checkJob.setOutputFormat(SequenceFileOutputFormat.class);
-        checkJob.setReducerClass(RandomCheckReducer.class);
-        checkJob.setNumReduceTasks(intermediateReduces);
-
-        JobClient.runJob(checkJob);
-
-        //
-        // OK, now we take the output from the last job and
-        // merge it down to a single file.  The map() and reduce()
-        // functions don't really do anything except reemit tuples.
-        // But by having a single reduce task here, we end up merging
-        // all the files.
-        //
-        Path finalOuts = new Path(testdir, "finalouts");        
-        fs.delete(finalOuts);
-        JobConf mergeJob = new JobConf(conf,TestRecordMR.class);
-        mergeJob.setInputPath(intermediateOuts);
-        mergeJob.setInputKeyClass(RecInt.class);
-        mergeJob.setInputValueClass(RecString.class);
-        mergeJob.setInputFormat(SequenceFileInputFormat.class);
-        mergeJob.setMapperClass(MergeMapper.class);
+    //
+    // Now we need to generate the random numbers according to
+    // the above distribution.
+    //
+    // We create a lot of map tasks, each of which takes at least
+    // one "line" of the distribution.  (That is, a certain number
+    // X is to be generated Y number of times.)
+    //
+    // A map task emits Y key/val pairs.  The val is X.  The key
+    // is a randomly-generated number.
+    //
+    // The reduce task gets its input sorted by key.  That is, sorted
+    // in random order.  It then emits a single line of text
+    // for the given values.  It does not emit the key.
+    //
+    // Because there's just one reduce task, we emit a single big
+    // file of random numbers.
+    //
+    Path randomOuts = new Path(testdir, "genouts");
+    fs.delete(randomOuts);
+
+
+    JobConf genJob = new JobConf(conf,TestRecordMR.class);
+    genJob.setInputPath(randomIns);
+    genJob.setInputKeyClass(RecInt.class);
+    genJob.setInputValueClass(RecInt.class);
+    genJob.setInputFormat(SequenceFileInputFormat.class);
+    genJob.setMapperClass(RandomGenMapper.class);
+
+    genJob.setOutputPath(randomOuts);
+    genJob.setOutputKeyClass(RecInt.class);
+    genJob.setOutputValueClass(RecString.class);
+    genJob.setOutputFormat(SequenceFileOutputFormat.class);
+    genJob.setReducerClass(RandomGenReducer.class);
+    genJob.setNumReduceTasks(1);
+
+    JobClient.runJob(genJob);
+
+    //
+    // Next, we read the big file in and regenerate the 
+    // original map.  It's split into a number of parts.
+    // (That number is 'intermediateReduces'.)
+    //
+    // We have many map tasks, each of which read at least one
+    // of the output numbers.  For each number read in, the
+    // map task emits a key/value pair where the key is the
+    // number and the value is "1".
+    //
+    // We have a single reduce task, which receives its input
+    // sorted by the key emitted above.  For each key, there will
+    // be a certain number of "1" values.  The reduce task sums
+    // these values to compute how many times the given key was
+    // emitted.
+    //
+    // The reduce task then emits a key/val pair where the key
+    // is the number in question, and the value is the number of
+    // times the key was emitted.  This is the same format as the
+    // original answer key (except that numbers emitted zero times
+    // will not appear in the regenerated key.)  The answer set
+    // is split into a number of pieces.  A final MapReduce job
+    // will merge them.
+    //
+    // There's not really a need to go to 10 reduces here 
+    // instead of 1.  But we want to test what happens when
+    // you have multiple reduces at once.
+    //
+    int intermediateReduces = 10;
+    Path intermediateOuts = new Path(testdir, "intermediateouts");
+    fs.delete(intermediateOuts);
+    JobConf checkJob = new JobConf(conf,TestRecordMR.class);
+    checkJob.setInputPath(randomOuts);
+    checkJob.setInputKeyClass(RecInt.class);
+    checkJob.setInputValueClass(RecString.class);
+    checkJob.setInputFormat(SequenceFileInputFormat.class);
+    checkJob.setMapperClass(RandomCheckMapper.class);
+
+    checkJob.setOutputPath(intermediateOuts);
+    checkJob.setOutputKeyClass(RecInt.class);
+    checkJob.setOutputValueClass(RecString.class);
+    checkJob.setOutputFormat(SequenceFileOutputFormat.class);
+    checkJob.setReducerClass(RandomCheckReducer.class);
+    checkJob.setNumReduceTasks(intermediateReduces);
+
+    JobClient.runJob(checkJob);
+
+    //
+    // OK, now we take the output from the last job and
+    // merge it down to a single file.  The map() and reduce()
+    // functions don't really do anything except reemit tuples.
+    // But by having a single reduce task here, we end up merging
+    // all the files.
+    //
+    Path finalOuts = new Path(testdir, "finalouts");        
+    fs.delete(finalOuts);
+    JobConf mergeJob = new JobConf(conf,TestRecordMR.class);
+    mergeJob.setInputPath(intermediateOuts);
+    mergeJob.setInputKeyClass(RecInt.class);
+    mergeJob.setInputValueClass(RecString.class);
+    mergeJob.setInputFormat(SequenceFileInputFormat.class);
+    mergeJob.setMapperClass(MergeMapper.class);
         
-        mergeJob.setOutputPath(finalOuts);
-        mergeJob.setOutputKeyClass(RecInt.class);
-        mergeJob.setOutputValueClass(RecInt.class);
-        mergeJob.setOutputFormat(SequenceFileOutputFormat.class);
-        mergeJob.setReducerClass(MergeReducer.class);
-        mergeJob.setNumReduceTasks(1);
+    mergeJob.setOutputPath(finalOuts);
+    mergeJob.setOutputKeyClass(RecInt.class);
+    mergeJob.setOutputValueClass(RecInt.class);
+    mergeJob.setOutputFormat(SequenceFileOutputFormat.class);
+    mergeJob.setReducerClass(MergeReducer.class);
+    mergeJob.setNumReduceTasks(1);
         
-        JobClient.runJob(mergeJob);
+    JobClient.runJob(mergeJob);
         
  
-        //
-        // Finally, we compare the reconstructed answer key with the
-        // original one.  Remember, we need to ignore zero-count items
-        // in the original key.
-        //
-        boolean success = true;
-        Path recomputedkey = new Path(finalOuts, "part-00000");
-        SequenceFile.Reader in = new SequenceFile.Reader(fs, recomputedkey, conf);
-        int totalseen = 0;
-        try {
-            RecInt key = new RecInt();
-            RecInt val = new RecInt();            
-            for (int i = 0; i < range; i++) {
-                if (dist[i] == 0) {
-                    continue;
-                }
-                if (! in.next(key, val)) {
-                    System.err.println("Cannot read entry " + i);
-                    success = false;
-                    break;
-                } else {
-                    if ( !((key.getData() == i ) && (val.getData() == dist[i]))) {
-                        System.err.println("Mismatch!  Pos=" + key.getData() + ", i=" + i + ", val=" + val.getData() + ", dist[i]=" + dist[i]);
-                        success = false;
-                    }
-                    totalseen += val.getData();
-                }
-            }
-            if (success) {
-                if (in.next(key, val)) {
-                    System.err.println("Unnecessary lines in recomputed key!");
-                    success = false;
-                }
-            }
-        } finally {
-            in.close();
-        }
-        int originalTotal = 0;
-        for (int i = 0; i < dist.length; i++) {
-            originalTotal += dist[i];
-        }
-        System.out.println("Original sum: " + originalTotal);
-        System.out.println("Recomputed sum: " + totalseen);
-
-        //
-        // Write to "results" whether the test succeeded or not.
-        //
-        Path resultFile = new Path(testdir, "results");
-        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fs.create(resultFile)));
-        try {
-            bw.write("Success=" + success + "\n");
-            System.out.println("Success=" + success);            
-        } finally {
-            bw.close();
-        }
-	fs.delete(testdir);
+    //
+    // Finally, we compare the reconstructed answer key with the
+    // original one.  Remember, we need to ignore zero-count items
+    // in the original key.
+    //
+    boolean success = true;
+    Path recomputedkey = new Path(finalOuts, "part-00000");
+    SequenceFile.Reader in = new SequenceFile.Reader(fs, recomputedkey, conf);
+    int totalseen = 0;
+    try {
+      RecInt key = new RecInt();
+      RecInt val = new RecInt();            
+      for (int i = 0; i < range; i++) {
+        if (dist[i] == 0) {
+          continue;
+        }
+        if (! in.next(key, val)) {
+          System.err.println("Cannot read entry " + i);
+          success = false;
+          break;
+        } else {
+          if ( !((key.getData() == i ) && (val.getData() == dist[i]))) {
+            System.err.println("Mismatch!  Pos=" + key.getData() + ", i=" + i + ", val=" + val.getData() + ", dist[i]=" + dist[i]);
+            success = false;
+          }
+          totalseen += val.getData();
+        }
+      }
+      if (success) {
+        if (in.next(key, val)) {
+          System.err.println("Unnecessary lines in recomputed key!");
+          success = false;
+        }
+      }
+    } finally {
+      in.close();
+    }
+    int originalTotal = 0;
+    for (int i = 0; i < dist.length; i++) {
+      originalTotal += dist[i];
     }
+    System.out.println("Original sum: " + originalTotal);
+    System.out.println("Recomputed sum: " + totalseen);
 
-    /**
-     * Launches all the tasks in order.
-     */
-    public static void main(String[] argv) throws Exception {
-        if (argv.length < 2) {
-            System.err.println("Usage: TestRecordMR <range> <counts>");
-            System.err.println();
-            System.err.println("Note: a good test will have a <counts> value that is substantially larger than the <range>");
-            return;
-        }
+    //
+    // Write to "results" whether the test succeeded or not.
+    //
+    Path resultFile = new Path(testdir, "results");
+    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fs.create(resultFile)));
+    try {
+      bw.write("Success=" + success + "\n");
+      System.out.println("Success=" + success);            
+    } finally {
+      bw.close();
+    }
+    fs.delete(testdir);
+  }
 
-        int i = 0;
-        range = Integer.parseInt(argv[i++]);
-        counts = Integer.parseInt(argv[i++]);
-	launch();
+  /**
+   * Launches all the tasks in order.
+   */
+  public static void main(String[] argv) throws Exception {
+    if (argv.length < 2) {
+      System.err.println("Usage: TestRecordMR <range> <counts>");
+      System.err.println();
+      System.err.println("Note: a good test will have a <counts> value that is substantially larger than the <range>");
+      return;
     }
+
+    int i = 0;
+    range = Integer.parseInt(argv[i++]);
+    counts = Integer.parseInt(argv[i++]);
+    launch();
+  }
 }
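
A minimal standalone sketch of the bookkeeping behind this test (illustrative
only; the class and method names below are made up and are not part of this
patch): it builds the same noisy distribution that launch() writes to
answer.key, then checks a reconstructed histogram against it while skipping
zero-count buckets, which is the comparison the final loop applies to
part-00000. The real test produces the second histogram through the
gen/check/merge job chain; the sketch leaves all of that plumbing out.

    import java.util.Arrays;
    import java.util.Random;

    public class DistributionCheckSketch {

      // Spread `counts` values over `range` buckets with Gaussian noise,
      // mirroring what launch() does when it writes the answer key.
      static int[] makeDistribution(int range, int counts, Random r) {
        int countsToGo = counts;
        int[] dist = new int[range];
        for (int i = 0; i < range; i++) {
          double avgInts = (1.0 * countsToGo) / (range - i);
          dist[i] = (int) Math.max(0,
              Math.round(avgInts + Math.sqrt(avgInts) * r.nextGaussian()));
          countsToGo -= dist[i];
        }
        if (countsToGo > 0) {
          dist[dist.length - 1] += countsToGo;  // park any remainder in the last bucket
        }
        return dist;
      }

      // Compare a recomputed histogram against the answer key, ignoring
      // buckets whose expected count is zero (they never reach the output).
      static boolean matches(int[] expected, int[] recomputed) {
        for (int i = 0; i < expected.length; i++) {
          if (expected[i] == 0) {
            continue;
          }
          if (recomputed[i] != expected[i]) {
            System.err.println("Mismatch at " + i + ": expected " + expected[i]
                + ", got " + recomputed[i]);
            return false;
          }
        }
        return true;
      }

      public static void main(String[] args) {
        int[] key = makeDistribution(10, 100, new Random());
        // A verbatim copy trivially matches; the test instead rebuilds the
        // histogram by running the three chained MapReduce jobs.
        System.out.println("Success=" + matches(key, Arrays.copyOf(key, key.length)));
      }
    }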

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/record/ToCpp.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/record/ToCpp.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/record/ToCpp.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/record/ToCpp.java Mon Apr 16 14:44:35 2007
@@ -31,85 +31,85 @@
  */
 public class ToCpp extends TestCase {
     
-    public ToCpp(String testName) {
-        super(testName);
-    }
+  public ToCpp(String testName) {
+    super(testName);
+  }
 
-    protected void setUp() throws Exception {
-    }
+  protected void setUp() throws Exception {
+  }
 
-    protected void tearDown() throws Exception {
-    }
+  protected void tearDown() throws Exception {
+  }
     
-    public void testBinary() {
-        File tmpfile;
-        try {
-            tmpfile = new File("/tmp/hadooptemp.dat");
-            FileOutputStream ostream = new FileOutputStream(tmpfile);
-            BinaryRecordOutput out = new BinaryRecordOutput(ostream);
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("random text");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            r1.serialize(out, "");
-            ostream.close();
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        } 
-    }
+  public void testBinary() {
+    File tmpfile;
+    try {
+      tmpfile = new File("/tmp/hadooptemp.dat");
+      FileOutputStream ostream = new FileOutputStream(tmpfile);
+      BinaryRecordOutput out = new BinaryRecordOutput(ostream);
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("random text");
+      r1.setBufferVal(new Buffer());
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      r1.serialize(out, "");
+      ostream.close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } 
+  }
     
-    public void testCsv() {
-        File tmpfile;
-        try {
-            tmpfile = new File("/tmp/hadooptemp.txt");
-            FileOutputStream ostream = new FileOutputStream(tmpfile);
-            CsvRecordOutput out = new CsvRecordOutput(ostream);
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("random text");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            r1.serialize(out, "");
-            ostream.close();
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        } 
-    }
+  public void testCsv() {
+    File tmpfile;
+    try {
+      tmpfile = new File("/tmp/hadooptemp.txt");
+      FileOutputStream ostream = new FileOutputStream(tmpfile);
+      CsvRecordOutput out = new CsvRecordOutput(ostream);
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("random text");
+      r1.setBufferVal(new Buffer());
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      r1.serialize(out, "");
+      ostream.close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } 
+  }
 
-    public void testXml() {
-        File tmpfile;
-        try {
-            tmpfile = new File("/tmp/hadooptemp.xml");
-            FileOutputStream ostream = new FileOutputStream(tmpfile);
-            XmlRecordOutput out = new XmlRecordOutput(ostream);
-            RecRecord1 r1 = new RecRecord1();
-            r1.setBoolVal(true);
-            r1.setByteVal((byte)0x66);
-            r1.setFloatVal(3.145F);
-            r1.setDoubleVal(1.5234);
-            r1.setIntVal(4567);
-            r1.setLongVal(0x5a5a5a5a5a5aL);
-            r1.setStringVal("random text");
-            r1.setBufferVal(new Buffer());
-            r1.setVectorVal(new ArrayList<String>());
-            r1.setMapVal(new TreeMap<String,String>());
-            r1.serialize(out, "");
-            ostream.close();
-        } catch (IOException ex) {
-            ex.printStackTrace();
-        } 
-    }
+  public void testXml() {
+    File tmpfile;
+    try {
+      tmpfile = new File("/tmp/hadooptemp.xml");
+      FileOutputStream ostream = new FileOutputStream(tmpfile);
+      XmlRecordOutput out = new XmlRecordOutput(ostream);
+      RecRecord1 r1 = new RecRecord1();
+      r1.setBoolVal(true);
+      r1.setByteVal((byte)0x66);
+      r1.setFloatVal(3.145F);
+      r1.setDoubleVal(1.5234);
+      r1.setIntVal(4567);
+      r1.setLongVal(0x5a5a5a5a5a5aL);
+      r1.setStringVal("random text");
+      r1.setBufferVal(new Buffer());
+      r1.setVectorVal(new ArrayList<String>());
+      r1.setMapVal(new TreeMap<String,String>());
+      r1.serialize(out, "");
+      ostream.close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } 
+  }
 }
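
The three test methods above differ only in the temp-file name and the record
output class; the record setup is copy-pasted three times. A possible cleanup,
sketched here under the assumption that it sits in this same class with its
existing imports (buildRecord is a made-up helper name, not something this
patch introduces), would share that setup:

    // Sketch only: populate the record once, using the same setters the
    // existing tests call.
    private static RecRecord1 buildRecord() {
      RecRecord1 r1 = new RecRecord1();
      r1.setBoolVal(true);
      r1.setByteVal((byte) 0x66);
      r1.setFloatVal(3.145F);
      r1.setDoubleVal(1.5234);
      r1.setIntVal(4567);
      r1.setLongVal(0x5a5a5a5a5a5aL);
      r1.setStringVal("random text");
      r1.setBufferVal(new Buffer());
      r1.setVectorVal(new ArrayList<String>());
      r1.setMapVal(new TreeMap<String,String>());
      return r1;
    }

    public void testBinary() throws IOException {
      FileOutputStream ostream = new FileOutputStream(new File("/tmp/hadooptemp.dat"));
      try {
        buildRecord().serialize(new BinaryRecordOutput(ostream), "");
      } finally {
        ostream.close();
      }
    }

testCsv() and testXml() would repeat the same four lines with CsvRecordOutput
and XmlRecordOutput writing to their own temp files. Declaring "throws
IOException" instead of catching and printing the exception also lets JUnit
report a failed write, which the current tests silently swallow.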

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/test/AllTestDriver.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/test/AllTestDriver.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/test/AllTestDriver.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/test/AllTestDriver.java Mon Apr 16 14:44:35 2007
@@ -60,7 +60,7 @@
       pgd.addClass("DFSCIOTest", DFSCIOTest.class, "Distributed i/o benchmark of libhdfs.");
       pgd.addClass("DistributedFSCheck", DistributedFSCheck.class, "Distributed checkup of the file system consistency.");
       pgd.addClass("testmapredsort", SortValidator.class, 
-      "A map/reduce program that validates the map-reduce framework's sort.");
+                   "A map/reduce program that validates the map-reduce framework's sort.");
       pgd.driver(argv);
     } catch(Throwable e) {
       e.printStackTrace();