Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/06/02 21:14:24 UTC

svn commit: r411254 [2/2] - in /lucene/hadoop/trunk: ./ bin/ conf/ lib/ src/contrib/streaming/src/java/org/apache/hadoop/streaming/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hado...

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java Fri Jun  2 12:14:22 2006
@@ -16,17 +16,18 @@
 
 package org.apache.hadoop.mapred;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.*;
 
 import java.io.*;
-import java.util.logging.Logger;
 
 /** Base class for tasks. */
 abstract class Task implements Writable, Configurable {
-  private static final Logger LOG =
-    LogFormatter.getLogger("org.apache.hadoop.mapred.TaskRunner");
+  private static final Log LOG =
+    LogFactory.getLog("org.apache.hadoop.mapred.TaskRunner");
 
   ////////////////////////////////////////////
   // Fields
@@ -116,7 +117,7 @@
         try {
           umbilical.progress(getTaskId(), progress, status);
         } catch (IOException ie) {
-          LOG.warning(StringUtils.stringifyException(ie));
+          LOG.warn(StringUtils.stringifyException(ie));
         }
       }
     }

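The Task.java hunk above is representative of the whole commit: loggers obtained from java.util.logging through Hadoop's LogFormatter helper are replaced with the Jakarta Commons Logging facade, and the level names change with them (fine -> debug, warning -> warn, severe -> fatal). A minimal sketch of the replacement idiom, assuming commons-logging 1.x on the classpath; the class below is illustrative and not part of the patch:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    class LoggingSketch {
      // was: Logger LOG = LogFormatter.getLogger("org.apache.hadoop.mapred.TaskRunner");
      private static final Log LOG =
          LogFactory.getLog("org.apache.hadoop.mapred.TaskRunner");

      void report(Exception e) {
        LOG.warn("diagnostic", e);   // was: LOG.log(Level.WARNING, "diagnostic", e)
        LOG.debug("detail");         // was: LOG.fine("detail")
        LOG.fatal("giving up");      // was: LOG.severe("giving up")
      }
    }

Unlike java.util.logging, the facade carries no formatter or handler setup in code; the concrete backend (log4j in this commit) is discovered at runtime and configured separately, which is why the LogFormatter plumbing below is deprecated and the handler-initialization calls are deleted.
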
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java Fri Jun  2 12:14:22 2006
@@ -15,11 +15,10 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.util.LogFormatter;
+import org.apache.commons.logging.*;
 
 import java.text.NumberFormat;
 import java.util.*;
-import java.util.logging.*;
 
 
 ////////////////////////////////////////////////////////
@@ -47,7 +46,7 @@
       idFormat.setGroupingUsed(false);
     }
 
-    public static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.mapred.TaskInProgress");
+    public static final Log LOG = LogFactory.getLog("org.apache.hadoop.mapred.TaskInProgress");
 
     // Defines the TIP
     private String jobFile = null;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Fri Jun  2 12:14:22 2006
@@ -15,14 +15,14 @@
  */
 package org.apache.hadoop.mapred;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.conf.*;
-import org.apache.hadoop.util.LogFormatter;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.util.*;
 
 import java.io.*;
 import java.util.jar.*;
-import java.util.logging.*;
 import java.util.Vector;
 import java.util.Enumeration;
 
@@ -31,8 +31,8 @@
  * user supplied map and reduce functions.
  */
 abstract class TaskRunner extends Thread {
-  public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.hadoop.mapred.TaskRunner");
+  public static final Log LOG =
+    LogFactory.getLog("org.apache.hadoop.mapred.TaskRunner");
 
   boolean killed = false;
   private Process process;
@@ -144,20 +144,20 @@
       // Run java
       runChild((String[])vargs.toArray(new String[0]), workDir);
     } catch (FSError e) {
-      LOG.log(Level.SEVERE, "FSError", e);
+      LOG.fatal("FSError", e);
       try {
         tracker.fsError(e.getMessage());
       } catch (IOException ie) {
-        LOG.log(Level.SEVERE, t.getTaskId()+" reporting FSError", ie);
+        LOG.fatal(t.getTaskId()+" reporting FSError", ie);
       }
     } catch (Throwable throwable) {
-      LOG.log(Level.WARNING, t.getTaskId()+" Child Error", throwable);
+      LOG.warn(t.getTaskId()+" Child Error", throwable);
       ByteArrayOutputStream baos = new ByteArrayOutputStream();
       throwable.printStackTrace(new PrintStream(baos));
       try {
         tracker.reportDiagnosticInfo(t.getTaskId(), baos.toString());
       } catch (IOException e) {
-        LOG.log(Level.WARNING, t.getTaskId()+" Reporting Diagnostics", e);
+        LOG.warn(t.getTaskId()+" Reporting Diagnostics", e);
       }
     } finally {
       tracker.reportTaskFinished(t.getTaskId());
@@ -187,7 +187,7 @@
         javaOpts = javaOpts.substring(0, index + MX.length()) +
             heapSize + ((end < 0)? "": javaOpts.substring(end));
     }
-    LOG.warning("mapred.child.heap.size is deprecated. Use " +
+    LOG.warn("mapred.child.heap.size is deprecated. Use " +
         "mapred.child.java.opt instead. Meantime, mapred.child.heap.size " +
         "is interpolated into mapred.child.java.opt: " + javaOpts);
     return javaOpts;
@@ -270,12 +270,12 @@
         LOG.info(t.getTaskId()+" "+line);
       }
     } catch (IOException e) {
-      LOG.log(Level.WARNING, t.getTaskId()+" Error reading child output", e);
+      LOG.warn(t.getTaskId()+" Error reading child output", e);
     } finally {
       try {
         output.close();
       } catch (IOException e) {
-        LOG.log(Level.WARNING, t.getTaskId()+" Error closing child output", e);
+        LOG.warn(t.getTaskId()+" Error closing child output", e);
       }
     }
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Fri Jun  2 12:14:22 2006
@@ -15,6 +15,8 @@
  */
  package org.apache.hadoop.mapred;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.ipc.*;
 import org.apache.hadoop.util.*;
@@ -22,7 +24,6 @@
 import java.io.*;
 import java.net.*;
 import java.util.*;
-import java.util.logging.*;
 
 /*******************************************************
  * TaskTracker is a process that starts and tracks MR Tasks
@@ -42,8 +43,8 @@
     // required for unknown reason to make WritableFactories work distributed
     static { new MapTask(); new ReduceTask(); new MapOutputLocation(); }
 
-    public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.hadoop.mapred.TaskTracker");
+    public static final Log LOG =
+    LogFactory.getLog("org.apache.hadoop.mapred.TaskTracker");
 
     private boolean running = true;
 
@@ -90,7 +91,7 @@
               TaskInProgress tip = (TaskInProgress) tasksToCleanup.take();
               tip.jobHasFinished();
             } catch (Throwable except) {
-              LOG.warning(StringUtils.stringifyException(except));
+              LOG.warn(StringUtils.stringifyException(except));
             }
           }
         }
@@ -407,7 +408,7 @@
                                 staleState = true;
                             }
                         } catch (Exception ex) {
-                            LOG.log(Level.INFO, "Lost connection to JobTracker [" + jobTrackAddr + "].  Retrying...", ex);
+                            LOG.info("Lost connection to JobTracker [" + jobTrackAddr + "].  Retrying...", ex);
                             try {
                                 Thread.sleep(5000);
                             } catch (InterruptedException ie) {
@@ -689,7 +690,7 @@
               runningTasks.put(task.getTaskId(), this);
               mapTotal++;
             } else {
-              LOG.warning("Output already reported lost:"+task.getTaskId());
+              LOG.warn("Output already reported lost:"+task.getTaskId());
             }
         }
 
@@ -700,7 +701,7 @@
          */
         void cleanup() throws IOException {
             String taskId = task.getTaskId();
-            LOG.fine("Cleaning up " + taskId);
+            LOG.debug("Cleaning up " + taskId);
             synchronized (TaskTracker.this) {
                tasks.remove(taskId);
                synchronized (this) {
@@ -737,7 +738,7 @@
         if (tip != null) {
           tip.reportProgress(progress, state);
         } else {
-          LOG.warning("Progress from unknown child task: "+taskid+". Ignored.");
+          LOG.warn("Progress from unknown child task: "+taskid+". Ignored.");
         }
     }
 
@@ -750,7 +751,7 @@
         if (tip != null) {
           tip.reportDiagnosticInfo(info);
         } else {
-          LOG.warning("Error from unknown child task: "+taskid+". Ignored.");
+          LOG.warn("Error from unknown child task: "+taskid+". Ignored.");
         }
     }
 
@@ -767,14 +768,14 @@
         if (tip != null) {
           tip.reportDone();
         } else {
-          LOG.warning("Unknown child task done: "+taskid+". Ignored.");
+          LOG.warn("Unknown child task done: "+taskid+". Ignored.");
         }
     }
 
     /** A child task had a local filesystem error.  Exit, so that no future
      * jobs are accepted. */
     public synchronized void fsError(String message) throws IOException {
-      LOG.severe("FSError, exiting: "+ message);
+      LOG.fatal("FSError, exiting: "+ message);
       running = false;
     }
 
@@ -792,7 +793,7 @@
         if (tip != null) {
           tip.taskFinished();
         } else {
-          LOG.warning("Unknown child task finshed: "+taskid+". Ignored.");
+          LOG.warn("Unknown child task finshed: "+taskid+". Ignored.");
         }
     }
 
@@ -804,7 +805,7 @@
         if (tip != null) {
           tip.mapOutputLost();
         } else {
-          LOG.warning("Unknown child with bad map output: "+taskid+". Ignored.");
+          LOG.warn("Unknown child with bad map output: "+taskid+". Ignored.");
         }
     }
 
@@ -813,7 +814,7 @@
      */
     public static class Child {
         public static void main(String[] args) throws Throwable {
-          LogFormatter.showTime(false);
+          //LogFactory.showTime(false);
           LOG.info("Child starting");
 
           JobConf defaultConf = new JobConf();
@@ -836,10 +837,10 @@
             FileSystem.get(job).setWorkingDirectory(job.getWorkingDirectory());
             task.run(job, umbilical);             // run the task
           } catch (FSError e) {
-            LOG.log(Level.SEVERE, "FSError from child", e);
+            LOG.fatal("FSError from child", e);
             umbilical.fsError(e.getMessage());
           } catch (Throwable throwable) {
-              LOG.log(Level.WARNING, "Error running child", throwable);
+              LOG.warn("Error running child", throwable);
               // Report back any failures, for diagnostic purposes
               ByteArrayOutputStream baos = new ByteArrayOutputStream();
               throwable.printStackTrace(new PrintStream(baos));
@@ -857,7 +858,7 @@
                 while (true) {
                   try {
                     if (!umbilical.ping(taskid)) {
-                      LOG.log(Level.WARNING, "Parent died.  Exiting "+taskid);
+                      LOG.warn("Parent died.  Exiting "+taskid);
                       System.exit(66);
                     }
                     remainingRetries = MAX_RETRIES;
@@ -866,7 +867,7 @@
                     LOG.info("Ping exception: " + msg);
                     remainingRetries -=1;
                     if (remainingRetries == 0) {
-                      LOG.log(Level.WARNING, "Last retry, killing "+taskid);
+                      LOG.warn("Last retry, killing "+taskid);
                       System.exit(65);
                     }
                   }
@@ -922,7 +923,6 @@
         }
 
         JobConf conf=new JobConf();
-        LogFormatter.initFileHandler( conf, "tasktracker" );
         new TaskTracker(conf).run();
     }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java Fri Jun  2 12:14:22 2006
@@ -24,8 +24,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-/** Prints just the date and the log message. */
-
+/** @deprecated use {@link org.apache.commons.logging.LogFactory} instead. */
 public class LogFormatter extends Formatter {
   private static final String FORMAT = "yyMMdd HHmmss";
   private static final String NEWLINE = System.getProperty("line.separator");

Added: lucene/hadoop/trunk/src/test/log4j.properties
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/log4j.properties?rev=411254&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/test/log4j.properties (added)
+++ lucene/hadoop/trunk/src/test/log4j.properties Fri Jun  2 12:14:22 2006
@@ -0,0 +1,7 @@
+# log4j configuration used during build and unit tests
+
+log4j.rootLogger=info,stdout
+log4j.threshhold=ALL
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
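
Since the root logger above is set to info, the many fine-to-debug conversions elsewhere in this commit stay quiet during unit tests unless the level is lowered. Reading the ConversionPattern: %d{ISO8601} is the timestamp, %-5p the level padded to five characters, %c{2} the last two components of the logger name, and (%F:%M(%L)) the source file, method, and line. For the "Child starting" call in TaskTracker.java below, a line would render roughly as follows (timestamp and line number illustrative):

    2006-06-02 12:14:22,345 INFO  mapred.TaskTracker (TaskTracker.java:main(282)) - Child starting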

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java Fri Jun  2 12:14:22 2006
@@ -18,11 +18,12 @@
 
 import junit.framework.TestCase;
 import junit.framework.AssertionFailedError;
+
+import org.apache.commons.logging.*;
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.fs.FSOutputStream;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.UTF8;
-import org.apache.hadoop.util.LogFormatter;
 import org.apache.hadoop.conf.Configuration;
 
 import java.io.File;
@@ -30,7 +31,6 @@
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.ListIterator;
-import java.util.logging.Logger;
 import java.util.Random;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
@@ -69,8 +69,8 @@
  * @author Paul Baclace
  */
 public class ClusterTestDFS extends TestCase implements FSConstants {
-  private static final Logger LOG =
-      LogFormatter.getLogger("org.apache.hadoop.dfs.ClusterTestDFS");
+  private static final Log LOG =
+      LogFactory.getLog("org.apache.hadoop.dfs.ClusterTestDFS");
 
   private static Configuration conf = new Configuration();
   private static int BUFFER_SIZE =

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java Fri Jun  2 12:14:22 2006
@@ -18,10 +18,12 @@
 
 import junit.framework.TestCase;
 import junit.framework.AssertionFailedError;
+
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.fs.FSOutputStream;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.UTF8;
-import org.apache.hadoop.util.LogFormatter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.NameNode;
 
@@ -32,8 +34,6 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 
 /**
  * Test DFS logging
@@ -41,8 +41,8 @@
  * @author Hairong Kuang
  */
 public class ClusterTestDFSNamespaceLogging extends TestCase implements FSConstants {
-  private static final Logger LOG =
-      LogFormatter.getLogger("org.apache.hadoop.dfs.ClusterTestDFS");
+  private static final Log LOG =
+      LogFactory.getLog("org.apache.hadoop.dfs.ClusterTestDFS");
 
   private static Configuration conf = new Configuration();
 
@@ -337,11 +337,6 @@
 	System.setProperty("hadoop.log.dir", baseDirSpecified+"/logs");
 	conf.setInt("hadoop.logfile.count", 1);
 	conf.setInt("hadoop.logfile.size", 1000000000);
-
-	// logging configuration for namenode
-    logFile = LogFormatter.initFileHandler( conf, "namenode" );
-    LogFormatter.setShowThreadIDs(true);
-    NameNode.stateChangeLog.setLevel( Level.FINEST);
   }
   
   private void startDFS( int dataNodeNum) throws IOException {

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DFSCIOTest.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DFSCIOTest.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DFSCIOTest.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DFSCIOTest.java Fri Jun  2 12:14:22 2006
@@ -19,10 +19,11 @@
 import java.io.*;
 
 import junit.framework.TestCase;
-import java.util.logging.*;
 import java.util.Date;
 import java.util.StringTokenizer;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
@@ -65,7 +66,7 @@
   private static final String BASE_FILE_NAME = "test_io_";
   private static final String DEFAULT_RES_FILE_NAME = "DFSCIOTest_results.log";
   
-  private static final Logger LOG = InputFormatBase.LOG;
+  private static final Log LOG = InputFormatBase.LOG;
   private static Configuration fsConfig = new Configuration();
   private static final long MEGA = 0x100000;
   private static String TEST_ROOT_DIR = System.getProperty("test.build.data","/benchmarks/DFSCIOTest");

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java Fri Jun  2 12:14:22 2006
@@ -19,12 +19,13 @@
 import java.io.*;
 
 import junit.framework.TestCase;
-import java.util.logging.*;
 import java.util.Date;
 import java.util.StringTokenizer;
 import java.util.TreeSet;
 import java.util.Vector;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
@@ -49,7 +50,7 @@
   private static final long MEGA = 0x100000;
   
   private static Configuration fsConfig = new Configuration();
-  private static final Logger LOG = InputFormatBase.LOG;
+  private static final Log LOG = InputFormatBase.LOG;
   private static Path TEST_ROOT_DIR = new Path(System.getProperty("test.build.data","/benchmarks/DistributedFSCheck"));
   private static Path MAP_INPUT_DIR = new Path(TEST_ROOT_DIR, "map_input");
   private static Path READ_DIR = new Path(TEST_ROOT_DIR, "io_read");

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java Fri Jun  2 12:14:22 2006
@@ -19,10 +19,11 @@
 import java.io.*;
 
 import junit.framework.TestCase;
-import java.util.logging.*;
 import java.util.Date;
 import java.util.StringTokenizer;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
@@ -65,7 +66,7 @@
   private static final String BASE_FILE_NAME = "test_io_";
   private static final String DEFAULT_RES_FILE_NAME = "TestDFSIO_results.log";
   
-  private static final Logger LOG = InputFormatBase.LOG;
+  private static final Log LOG = InputFormatBase.LOG;
   private static Configuration fsConfig = new Configuration();
   private static final long MEGA = 0x100000;
   private static String TEST_ROOT_DIR = System.getProperty("test.build.data","/benchmarks/TestDFSIO");

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java Fri Jun  2 12:14:22 2006
@@ -19,7 +19,8 @@
 import java.io.*;
 import java.util.*;
 import junit.framework.TestCase;
-import java.util.logging.*;
+
+import org.apache.commons.logging.*;
 
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapred.lib.*;
@@ -27,7 +28,7 @@
 import org.apache.hadoop.conf.*;
 
 public class TestFileSystem extends TestCase {
-  private static final Logger LOG = InputFormatBase.LOG;
+  private static final Log LOG = InputFormatBase.LOG;
 
   private static Configuration conf = new Configuration();
   private static int BUFFER_SIZE = conf.getInt("io.file.buffer.size", 4096);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestArrayFile.java Fri Jun  2 12:14:22 2006
@@ -18,14 +18,15 @@
 
 import java.io.*;
 import junit.framework.TestCase;
-import java.util.logging.*;
+
+import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
 
 /** Support for flat files of binary key/value pairs. */
 public class TestArrayFile extends TestCase {
-  private static Logger LOG = SequenceFile.LOG;
+  private static Log LOG = SequenceFile.LOG;
   private static String FILE =
     System.getProperty("test.build.data",".") + "/test.array";
 
@@ -51,7 +52,7 @@
   }
 
   private static RandomDatum[] generate(int count) {
-    LOG.fine("generating " + count + " records in memory");
+    LOG.debug("generating " + count + " records in debug");
     RandomDatum[] data = new RandomDatum[count];
     RandomDatum.Generator generator = new RandomDatum.Generator();
     for (int i = 0; i < count; i++) {
@@ -64,7 +65,7 @@
   private static void writeTest(FileSystem fs, RandomDatum[] data, String file)
     throws IOException {
     MapFile.delete(fs, file);
-    LOG.fine("creating with " + data.length + " records");
+    LOG.debug("creating with " + data.length + " debug");
     ArrayFile.Writer writer = new ArrayFile.Writer(fs, file, RandomDatum.class);
     writer.setIndexInterval(100);
     for (int i = 0; i < data.length; i++)
@@ -75,7 +76,7 @@
   private static void readTest(FileSystem fs, RandomDatum[] data, String file, Configuration conf)
     throws IOException {
     RandomDatum v = new RandomDatum();
-    LOG.fine("reading " + data.length + " records");
+    LOG.debug("reading " + data.length + " debug");
     ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
     for (int i = 0; i < data.length; i++) {       // try forwards
       reader.get(i, v);
@@ -90,7 +91,7 @@
       }
     }
     reader.close();
-    LOG.fine("done reading " + data.length + " records");
+    LOG.debug("done reading " + data.length + " debug");
   }
 
 
@@ -130,8 +131,6 @@
         LOG.info("create = " + create);
         LOG.info("check = " + check);
         LOG.info("file = " + file);
-
-        LOG.setLevel(Level.FINE);
 
         RandomDatum[] data = generate(count);
 

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Fri Jun  2 12:14:22 2006
@@ -19,7 +19,8 @@
 import java.io.*;
 import java.util.*;
 import junit.framework.TestCase;
-import java.util.logging.*;
+
+import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
@@ -27,7 +28,7 @@
 
 /** Support for flat files of binary key/value pairs. */
 public class TestSequenceFile extends TestCase {
-  private static Logger LOG = SequenceFile.LOG;
+  private static Log LOG = SequenceFile.LOG;
 
   private static Configuration conf = new Configuration();
   
@@ -68,7 +69,7 @@
                                 Path file, boolean compress)
     throws IOException {
     fs.delete(file);
-    LOG.fine("creating with " + count + " records");
+    LOG.debug("creating with " + count + " records");
     SequenceFile.Writer writer =
       new SequenceFile.Writer(fs, file, RandomDatum.class, RandomDatum.class,
                               compress);
@@ -87,7 +88,7 @@
     throws IOException {
     RandomDatum k = new RandomDatum();
     RandomDatum v = new RandomDatum();
-    LOG.fine("reading " + count + " records");
+    LOG.debug("reading " + count + " records");
     SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
     for (int i = 0; i < count; i++) {
@@ -111,14 +112,14 @@
     throws IOException {
     fs.delete(new Path(file+".sorted"));
     SequenceFile.Sorter sorter = newSorter(fs, fast, megabytes, factor);
-    LOG.fine("sorting " + count + " records");
+    LOG.debug("sorting " + count + " records");
     sorter.sort(file, file.suffix(".sorted"));
-    LOG.fine("done sorting " + count + " records");
+    LOG.info("done sorting " + count + " debug");
   }
 
   private static void checkSort(FileSystem fs, int count, int seed, Path file)
     throws IOException {
-    LOG.fine("sorting " + count + " records in memory for check");
+    LOG.info("sorting " + count + " records in memory for debug");
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
     SortedMap map = new TreeMap();
     for (int i = 0; i < count; i++) {
@@ -128,7 +129,7 @@
       map.put(key, value);
     }
 
-    LOG.fine("checking order of " + count + " records");
+    LOG.debug("checking order of " + count + " records");
     RandomDatum k = new RandomDatum();
     RandomDatum v = new RandomDatum();
     Iterator iterator = map.entrySet().iterator();
@@ -148,7 +149,7 @@
     }
 
     reader.close();
-    LOG.fine("sucessfully checked " + count + " records");
+    LOG.debug("sucessfully checked " + count + " records");
   }
 
   private static void mergeTest(FileSystem fs, int count, int seed, 
@@ -156,7 +157,7 @@
                                 int megabytes)
     throws IOException {
 
-    LOG.fine("creating "+factor+" files with "+count/factor+" records");
+    LOG.debug("creating "+factor+" files with "+count/factor+" records");
 
     SequenceFile.Writer[] writers = new SequenceFile.Writer[factor];
     Path[] names = new Path[factor];
@@ -185,11 +186,11 @@
       writers[i].close();
 
     for (int i = 0; i < factor; i++) {
-      LOG.fine("sorting file " + i + " with " + count/factor + " records");
+      LOG.debug("sorting file " + i + " with " + count/factor + " records");
       newSorter(fs, fast, megabytes, factor).sort(names[i], sortedNames[i]);
     }
 
-    LOG.fine("merging " + factor + " files with " + count/factor + " records");
+    LOG.info("merging " + factor + " files with " + count/factor + " debug");
     fs.delete(new Path(file+".sorted"));
     newSorter(fs, fast, megabytes, factor)
       .merge(sortedNames, file.suffix(".sorted"));
@@ -264,8 +265,6 @@
 
         int seed = 0;
  
-        LOG.setLevel(Level.FINE);
-
         if (create && !merge) {
             writeTest(fs, count, seed, file, compress);
             readTest(fs, count, seed, file);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java Fri Jun  2 12:14:22 2006
@@ -19,14 +19,15 @@
 import java.io.*;
 import java.util.*;
 import junit.framework.TestCase;
-import java.util.logging.*;
+
+import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
 
 /** Support for flat files of binary key/value pairs. */
 public class TestSetFile extends TestCase {
-  private static Logger LOG = SequenceFile.LOG;
+  private static Log LOG = SequenceFile.LOG;
   private static String FILE =
     System.getProperty("test.build.data",".") + "/test.set";
 
@@ -46,14 +47,14 @@
   }
 
   private static RandomDatum[] generate(int count) {
-    LOG.fine("generating " + count + " records in memory");
+    LOG.debug("generating " + count + " records in memory");
     RandomDatum[] data = new RandomDatum[count];
     RandomDatum.Generator generator = new RandomDatum.Generator();
     for (int i = 0; i < count; i++) {
       generator.next();
       data[i] = generator.getValue();
     }
-    LOG.fine("sorting " + count + " records in memory");
+    LOG.info("sorting " + count + " records in debug");
     Arrays.sort(data);
     return data;
   }
@@ -61,7 +62,7 @@
   private static void writeTest(FileSystem fs, RandomDatum[] data, String file)
     throws IOException {
     MapFile.delete(fs, file);
-    LOG.fine("creating with " + data.length + " records");
+    LOG.debug("creating with " + data.length + " records");
     SetFile.Writer writer = new SetFile.Writer(fs, file, RandomDatum.class);
     for (int i = 0; i < data.length; i++)
       writer.append(data[i]);
@@ -71,14 +72,14 @@
   private static void readTest(FileSystem fs, RandomDatum[] data, String file)
     throws IOException {
     RandomDatum v = new RandomDatum();
-    LOG.fine("reading " + data.length + " records");
+    LOG.debug("reading " + data.length + " records");
     SetFile.Reader reader = new SetFile.Reader(fs, file, conf);
     for (int i = 0; i < data.length; i++) {
       if (!reader.seek(data[i]))
         throw new RuntimeException("wrong value at " + i);
     }
     reader.close();
-    LOG.fine("done reading " + data.length + " records");
+    LOG.info("done reading " + data.length + " debug");
   }
 
 
@@ -116,8 +117,6 @@
         LOG.info("create = " + create);
         LOG.info("check = " + check);
         LOG.info("file = " + file);
-
-        LOG.setLevel(Level.FINE);
 
         RandomDatum[] data = generate(count);
 

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java Fri Jun  2 12:14:22 2006
@@ -16,6 +16,8 @@
 
 package org.apache.hadoop.ipc;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.LongWritable;
 
@@ -25,26 +27,15 @@
 
 import junit.framework.TestCase;
 
-import java.util.logging.Logger;
-import java.util.logging.Level;
-
-import org.apache.hadoop.util.LogFormatter;
 import org.apache.hadoop.conf.Configuration;
 
 /** Unit tests for IPC. */
 public class TestIPC extends TestCase {
-  public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.hadoop.ipc.TestIPC");
+  public static final Log LOG =
+    LogFactory.getLog("org.apache.hadoop.ipc.TestIPC");
 
   private static Configuration conf = new Configuration();
   
-  // quiet during testing, since output ends up on console
-  static {
-    LOG.setLevel(Level.WARNING);
-    Client.LOG.setLevel(Level.WARNING);
-    Server.LOG.setLevel(Level.WARNING);
-  }
-
   public TestIPC(String name) { super(name); }
 
   private static final Random RANDOM = new Random();
@@ -88,12 +79,12 @@
           LongWritable value =
             (LongWritable)client.call(param, new InetSocketAddress(PORT));
           if (!param.equals(value)) {
-            LOG.severe("Call failed!");
+            LOG.fatal("Call failed!");
             failed = true;
             break;
           }
         } catch (Exception e) {
-          LOG.severe("Caught: " + e);
+          LOG.fatal("Caught: " + e);
           failed = true;
         }
       }
@@ -123,13 +114,13 @@
           Writable[] values = client.call(params, addresses);
           for (int j = 0; j < addresses.length; j++) {
             if (!params[j].equals(values[j])) {
-              LOG.severe("Call failed!");
+              LOG.fatal("Call failed!");
               failed = true;
               break;
             }
           }
         } catch (Exception e) {
-          LOG.severe("Caught: " + e);
+          LOG.fatal("Caught: " + e);
           failed = true;
         }
       }
@@ -209,11 +200,6 @@
   }
 	
   public static void main(String[] args) throws Exception {
-    // crank up the volume!
-    LOG.setLevel(Level.FINE);
-    Client.LOG.setLevel(Level.FINE);
-    Server.LOG.setLevel(Level.FINE);
-    LogFormatter.setShowThreadIDs(true);
 
     //new TestIPC("test").testSerial(5, false, 2, 10, 1000);
 

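The static blocks deleted from TestIPC here (and from TestRPC below) called Logger.setLevel, which has no counterpart on the Commons Logging Log interface; per-logger verbosity presumably moves into log4j configuration instead. Quieting the IPC client and server during tests would then look like the following hypothetical additions to a log4j.properties, assuming the Client and Server loggers keep those category names (these lines are not part of this commit):

    log4j.logger.org.apache.hadoop.ipc.Client=WARN
    log4j.logger.org.apache.hadoop.ipc.Server=WARN
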
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java Fri Jun  2 12:14:22 2006
@@ -22,11 +22,10 @@
 
 import junit.framework.TestCase;
 
-import java.util.logging.Logger;
-import java.util.logging.Level;
 import java.util.Arrays;
 
-import org.apache.hadoop.util.LogFormatter;
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.Writable;
@@ -35,17 +34,14 @@
 public class TestRPC extends TestCase {
   private static final int PORT = 1234;
 
-  public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.hadoop.ipc.TestRPC");
+  public static final Log LOG =
+    LogFactory.getLog("org.apache.hadoop.ipc.TestRPC");
   
   private static Configuration conf = new Configuration();
 
   // quiet during testing, since output ends up on console
   static {
     conf.setInt("ipc.client.timeout", 5000);
-    LOG.setLevel(Level.WARNING);
-    Client.LOG.setLevel(Level.WARNING);
-    Server.LOG.setLevel(Level.WARNING);
   }
 
   public TestRPC(String name) { super(name); }
@@ -134,7 +130,7 @@
     try {
       proxy.error();
     } catch (IOException e) {
-      LOG.fine("Caught " + e);
+      LOG.debug("Caught " + e);
       caught = true;
     }
     assertTrue(caught);
@@ -156,11 +152,6 @@
     server.stop();
   }
   public static void main(String[] args) throws Exception {
-    // crank up the volume!
-    LOG.setLevel(Level.FINE);
-    Client.LOG.setLevel(Level.FINE);
-    Server.LOG.setLevel(Level.FINE);
-    LogFormatter.setShowThreadIDs(true);
 
     new TestRPC("test").testCalls();
 

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMapRed.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMapRed.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMapRed.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMapRed.java Fri Jun  2 12:14:22 2006
@@ -273,7 +273,7 @@
         // file of random numbers.
         //
         Path randomOuts = new Path(testdir, "genouts");
-        //fs.mkdirs(randomOuts);
+        fs.delete(randomOuts);
 
 
         JobConf genJob = new JobConf(conf);
@@ -322,7 +322,7 @@
         //
         int intermediateReduces = 10;
         Path intermediateOuts = new Path(testdir, "intermediateouts");
-        //fs.mkdirs(intermediateOuts);
+        fs.delete(intermediateOuts);
         JobConf checkJob = new JobConf(conf);
         checkJob.setInputPath(randomOuts);
         checkJob.setInputKeyClass(LongWritable.class);
@@ -347,7 +347,7 @@
         // all the files.
         //
         Path finalOuts = new Path(testdir, "finalouts");        
-        //fs.mkdirs(finalOuts);
+        fs.delete(finalOuts);
         JobConf mergeJob = new JobConf(conf);
         mergeJob.setInputPath(intermediateOuts);
         mergeJob.setInputKeyClass(IntWritable.class);

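The three hunks above replace commented-out mkdirs calls with fs.delete so the test can be rerun: a leftover output directory from a previous run would otherwise make the job fail at submission because the output path already exists. The test calls delete unconditionally, so the period FileSystem.delete is evidently tolerated on a missing path. A minimal sketch of the pattern, with names taken from the test but otherwise illustrative:

    // Clear any stale output from a previous run before submitting the job;
    // delete(Path) here is the old recursive form used throughout this test.
    Path randomOuts = new Path(testdir, "genouts");
    fs.delete(randomOuts);
    JobConf genJob = new JobConf(conf);
    genJob.setOutputPath(randomOuts);   // safe now: the path no longer exists
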
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java Fri Jun  2 12:14:22 2006
@@ -19,14 +19,15 @@
 import java.io.*;
 import java.util.*;
 import junit.framework.TestCase;
-import java.util.logging.*;
+
+import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
 
 public class TestSequenceFileInputFormat extends TestCase {
-  private static final Logger LOG = InputFormatBase.LOG;
+  private static final Log LOG = InputFormatBase.LOG;
 
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java Fri Jun  2 12:14:22 2006
@@ -19,14 +19,15 @@
 import java.io.*;
 import java.util.*;
 import junit.framework.TestCase;
-import java.util.logging.*;
+
+import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
 
 public class TestTextInputFormat extends TestCase {
-  private static final Logger LOG = InputFormatBase.LOG;
+  private static final Log LOG = InputFormatBase.LOG;
 
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/record/test/TestMapRed.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/record/test/TestMapRed.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/record/test/TestMapRed.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/record/test/TestMapRed.java Fri Jun  2 12:14:22 2006
@@ -283,7 +283,7 @@
         // file of random numbers.
         //
         File randomOuts = new File(testdir, "genouts");
-        //fs.mkdirs(randomOuts);
+        fs.delete(randomOuts);
 
 
         JobConf genJob = new JobConf(conf);
@@ -332,7 +332,7 @@
         //
         int intermediateReduces = 10;
         File intermediateOuts = new File(testdir, "intermediateouts");
-        //fs.mkdirs(intermediateOuts);
+        fs.delete(intermediateOuts);
         JobConf checkJob = new JobConf(conf);
         checkJob.setInputDir(randomOuts);
         checkJob.setInputKeyClass(RecInt.class);
@@ -357,7 +357,7 @@
         // all the files.
         //
         File finalOuts = new File(testdir, "finalouts");        
-        //fs.mkdirs(finalOuts);
+        fs.delete(finalOuts);
         JobConf mergeJob = new JobConf(conf);
         mergeJob.setInputDir(intermediateOuts);
         mergeJob.setInputKeyClass(RecInt.class);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/record/test/TestWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/record/test/TestWritable.java?rev=411254&r1=411253&r2=411254&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/record/test/TestWritable.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/record/test/TestWritable.java Fri Jun  2 12:14:22 2006
@@ -19,11 +19,11 @@
 import java.io.*;
 import java.util.*;
 import junit.framework.TestCase;
-import java.util.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
+import org.apache.commons.logging.*;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputFormatBase;
@@ -33,7 +33,7 @@
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 
 public class TestWritable extends TestCase {
-  private static final Logger LOG = InputFormatBase.LOG;
+  private static final Log LOG = InputFormatBase.LOG;
 
   private static int MAX_LENGTH = 10000;
   private static Configuration conf = new Configuration();