Posted to commits@pig.apache.org by ga...@apache.org on 2008/02/12 22:22:43 UTC

svn commit: r627115 - in /incubator/pig/trunk: ./ lib-src/bzip2/org/apache/tools/bzip2r/ lib-src/shock/org/apache/pig/shock/ src/org/apache/pig/ src/org/apache/pig/backend/hadoop/executionengine/ src/org/apache/pig/backend/hadoop/executionengine/mapred...

Author: gates
Date: Tue Feb 12 13:22:40 2008
New Revision: 627115

URL: http://svn.apache.org/viewvc?rev=627115&view=rev
Log:
PIG-83: Change everything except grunt and Main (PigServer on down) to use the common logging abstraction instead of log4j.  By default in grunt, log4j is still used as the logging layer.  Also converted all System.out/err.println statements to use logging instead.
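
For readers skimming the diff, the conversion pattern applied throughout is roughly the sketch below. It is illustrative only (the class name and messages are hypothetical, not taken from the patch): a commons-logging Log replaces the log4j/PigLogger references and the raw System.out/err.println calls.

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    // Hypothetical class, not part of the patch; it only shows the pattern
    // the diff applies file by file.
    public class ExampleConverted {

        // Per-class logger obtained through the commons-logging abstraction.
        private static final Log log = LogFactory.getLog(ExampleConverted.class);

        public void doWork() {
            // Previously: System.out.println("starting work");
            log.info("starting work");

            // Previously: System.err.println("something went wrong");
            log.error("something went wrong");
        }
    }

Note that some of the modified classes use an instance field (LogFactory.getLog(getClass())) rather than a static one; both are standard commons-logging idioms, and the actual choices are visible in the hunks below.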

Modified:
    incubator/pig/trunk/CHANGES.txt
    incubator/pig/trunk/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java
    incubator/pig/trunk/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java
    incubator/pig/trunk/src/org/apache/pig/Main.java
    incubator/pig/trunk/src/org/apache/pig/PigServer.java
    incubator/pig/trunk/src/org/apache/pig/StandAloneParser.java
    incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
    incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/POMapreduce.java
    incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/MapReduceLauncher.java
    incubator/pig/trunk/src/org/apache/pig/backend/local/executionengine/POStore.java
    incubator/pig/trunk/src/org/apache/pig/data/DefaultDataBag.java
    incubator/pig/trunk/src/org/apache/pig/data/DistinctDataBag.java
    incubator/pig/trunk/src/org/apache/pig/data/SortedDataBag.java
    incubator/pig/trunk/src/org/apache/pig/data/TimestampedTuple.java
    incubator/pig/trunk/src/org/apache/pig/data/Tuple.java
    incubator/pig/trunk/src/org/apache/pig/impl/PigContext.java
    incubator/pig/trunk/src/org/apache/pig/impl/builtin/ShellBagEvalFunc.java
    incubator/pig/trunk/src/org/apache/pig/impl/eval/GenerateSpec.java
    incubator/pig/trunk/src/org/apache/pig/impl/eval/cond/FuncCond.java
    incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOEval.java
    incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java
    incubator/pig/trunk/src/org/apache/pig/impl/util/JarManager.java
    incubator/pig/trunk/src/org/apache/pig/impl/util/PigLogger.java
    incubator/pig/trunk/src/org/apache/pig/impl/util/SpillableMemoryManager.java
    incubator/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java
    incubator/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java
    incubator/pig/trunk/test/org/apache/pig/test/TestCompressedFiles.java
    incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpNumeric.java
    incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpString.java
    incubator/pig/trunk/test/org/apache/pig/test/TestInfixArithmetic.java
    incubator/pig/trunk/test/org/apache/pig/test/TestOrderBy.java
    incubator/pig/trunk/test/org/apache/pig/test/TestPi.java
    incubator/pig/trunk/test/org/apache/pig/test/TestPigFile.java

Modified: incubator/pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/CHANGES.txt?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/CHANGES.txt (original)
+++ incubator/pig/trunk/CHANGES.txt Tue Feb 12 13:22:40 2008
@@ -97,3 +97,8 @@
 
 	PIG-92: Fix NullPointerException in PIgContext due to uninitialized conf
 	reference. (francisoud via gates)
+
+	PIG-83: Change everything except grunt and Main (PigServer on down) to use
+	common logging abstraction instead of log4j.  By default in grunt, log4j
+	still used as logging layer.  Also converted all System.out/err.println
+	statements to use logging instead. (francisoud via gates)

Modified: incubator/pig/trunk/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java (original)
+++ incubator/pig/trunk/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java Tue Feb 12 13:22:40 2008
@@ -63,6 +63,9 @@
 import java.io.OutputStream;
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 /**
  * An output stream that compresses into the BZip2 format (without the file
  * header chars) into another stream.
@@ -72,6 +75,9 @@
  * TODO:    Update to BZip2 1.0.1
  */
 public class CBZip2OutputStream extends OutputStream implements BZip2Constants {
+
+    private final static Log log = LogFactory.getLog(CBZip2OutputStream.class);
+    
     protected static final int SETMASK = (1 << 21);
     protected static final int CLEARMASK = (~SETMASK);
     protected static final int GREATER_ICOST = 15;
@@ -90,7 +96,7 @@
     protected static final int QSORT_STACK_SIZE = 1000;
 
     private static void panic() {
-        System.out.println("panic");
+        log.info("panic");
         //throw new CError();
     }
 

Modified: incubator/pig/trunk/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java (original)
+++ incubator/pig/trunk/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java Tue Feb 12 13:22:40 2008
@@ -38,6 +38,9 @@
 import java.util.HashMap;
 import java.util.Properties;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 import com.jcraft.jsch.ChannelDirectTCPIP;
 import com.jcraft.jsch.ChannelExec;
 import com.jcraft.jsch.JSch;
@@ -67,6 +70,9 @@
  *
  */
 public class SSHSocketImplFactory implements SocketImplFactory, Logger {
+    
+    private static final Log log = LogFactory.getLog(SSHSocketImplFactory.class);
+
 	Session session;
 
 	public static SSHSocketImplFactory getFactory() throws JSchException, IOException {
@@ -136,7 +142,7 @@
 	}
 
 	public void log(int arg0, String arg1) {
-		System.err.println(arg0 + ": " + arg1);
+		log.error(arg0 + ": " + arg1);
 	}
 
 	class SSHProcess extends Process {
@@ -226,6 +232,8 @@
  */
 class SSHSocketFactory implements SocketFactory {
 
+    private final static Log log = LogFactory.getLog(SSHSocketFactory.class);
+    
 	public Socket createSocket(String host, int port) throws IOException,
 			UnknownHostException {
 		String socksHost = System.getProperty("socksProxyHost");
@@ -237,7 +245,7 @@
 			s = new Socket(proxy);
 			s.connect(addr);
 		} else {
-			System.err.println(addr);
+			log.error(addr);
 			SocketChannel sc = SocketChannel.open(addr);
 			s = sc.socket();
 		}
@@ -302,6 +310,9 @@
  * things to SSH.
  */
 class SSHSocketImpl extends SocketImpl {
+    
+    private static final Log log = LogFactory.getLog(SSHSocketImpl.class);
+
 	Session session;
 
 	ChannelDirectTCPIP channel;
@@ -384,10 +395,10 @@
 							(PipedInputStream) is));
 			channel.connect();
 			if (!channel.isConnected()) {
-				System.err.println("Not connected");
+				log.error("Not connected");
 			}
 			if (channel.isEOF()) {
-				System.err.println("EOF");
+				log.error("EOF");
 			}
 		} catch (JSchException e) {
 			e.printStackTrace();
@@ -445,9 +456,9 @@
 					@Override
 					public void run() {
 						try {
-							System.err.println("Starting " + this);
+							log.error("Starting " + this);
 							connectTest("www.yahoo.com");
-							System.err.println("Finished " + this);
+							log.error("Finished " + this);
 						} catch (Exception e) {
 							e.printStackTrace();
 						}
@@ -456,15 +467,15 @@
 			}
 			Thread.sleep(1000000);
 			connectTest("www.news.com");
-			System.out.println("******** Starting PART II");
+			log.info("******** Starting PART II");
 			for (int i = 0; i < 10; i++) {
 				new Thread() {
 					@Override
 					public void run() {
 						try {
-							System.err.println("Starting " + this);
+							log.error("Starting " + this);
 							connectTest("www.flickr.com");
-							System.err.println("Finished " + this);
+							log.error("Finished " + this);
 						} catch (Exception e) {
 							e.printStackTrace();
 						}

Modified: incubator/pig/trunk/src/org/apache/pig/Main.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/Main.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/Main.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/Main.java Tue Feb 12 13:22:40 2008
@@ -277,19 +277,19 @@
     
 public static void usage()
 {
-    System.err.println("USAGE: Pig [options] [-] : Run interactively in grunt shell.");
-    System.err.println("       Pig [options] -e[xecute] cmd [cmd ...] : Run cmd(s).");
-    System.err.println("       Pig [options] [-f[ile]] file : Run cmds found in file.");
-    System.err.println("  options include:");
-    System.err.println("    -4, -log4jconf log4j configuration file, overrides log conf");
-    System.err.println("    -b, -brief brief logging (no timestamps)");
-    System.err.println("    -c, -cluster clustername, kryptonite is default");
-    System.err.println("    -d, -debug debug level, INFO is default");
-    System.err.println("    -h, -help display this message");
-    System.err.println("    -j, -jar jarfile load jarfile"); 
-    System.err.println("    -o, -hod read hod server from system property ssh.gateway");
-    System.err.println("    -v, -verbose print all log messages to screen (default to print only INFO and above to screen)");
-    System.err.println("    -x, -exectype local|mapreduce, mapreduce is default");
+    System.out.println("USAGE: Pig [options] [-] : Run interactively in grunt shell.");
+    System.out.println("       Pig [options] -e[xecute] cmd [cmd ...] : Run cmd(s).");
+    System.out.println("       Pig [options] [-f[ile]] file : Run cmds found in file.");
+    System.out.println("  options include:");
+    System.out.println("    -4, -log4jconf log4j configuration file, overrides log conf");
+    System.out.println("    -b, -brief brief logging (no timestamps)");
+    System.out.println("    -c, -cluster clustername, kryptonite is default");
+    System.out.println("    -d, -debug debug level, INFO is default");
+    System.out.println("    -h, -help display this message");
+    System.out.println("    -j, -jar jarfile load jarfile"); 
+    System.out.println("    -o, -hod read hod server from system property ssh.gateway");
+    System.out.println("    -v, -verbose print all log messages to screen (default to print only INFO and above to screen)");
+    System.out.println("    -x, -exectype local|mapreduce, mapreduce is default");
 }
         
         

Modified: incubator/pig/trunk/src/org/apache/pig/PigServer.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/PigServer.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/PigServer.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/PigServer.java Tue Feb 12 13:22:40 2008
@@ -20,32 +20,38 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.Enumeration;
-import java.net.URL;
-import java.util.Date;
 import java.util.Properties;
-import java.util.Collection;
-import java.util.ArrayList;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.pig.backend.datastorage.ContainerDescriptor;
+import org.apache.pig.backend.datastorage.DataStorage;
+import org.apache.pig.backend.datastorage.DataStorageException;
+import org.apache.pig.backend.datastorage.ElementDescriptor;
+import org.apache.pig.backend.executionengine.ExecException;
+import org.apache.pig.backend.executionengine.ExecJob;
+import org.apache.pig.backend.executionengine.ExecPhysicalPlan;
+import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;
+import org.apache.pig.backend.hadoop.executionengine.mapreduceExec.MapReduceLauncher;
 import org.apache.pig.builtin.PigStorage;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.impl.io.FileLocalizer;
+import org.apache.pig.impl.logicalLayer.LogicalOperator;
 import org.apache.pig.impl.logicalLayer.LogicalPlan;
 import org.apache.pig.impl.logicalLayer.LogicalPlanBuilder;
-import org.apache.pig.impl.logicalLayer.parser.ParseException;
 import org.apache.pig.impl.logicalLayer.OperatorKey;
-import org.apache.pig.impl.logicalLayer.LogicalOperator;
+import org.apache.pig.impl.logicalLayer.parser.ParseException;
 import org.apache.pig.impl.logicalLayer.parser.QueryParser;
 import org.apache.pig.impl.logicalLayer.schema.TupleSchema;
-import org.apache.pig.backend.executionengine.*;
-import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;
-import org.apache.pig.backend.hadoop.executionengine.mapreduceExec.MapReduceLauncher;
-import org.apache.pig.backend.datastorage.*;
-import org.apache.pig.impl.util.PigLogger;
 
 
 /**
@@ -57,6 +63,8 @@
  */
 public class PigServer {
     
+    private final Log log = LogFactory.getLog(getClass());
+    
     /**
      * The type of query execution
      */
@@ -164,7 +172,7 @@
                 logMessage += (logMessage + urls.nextElement() + "; ");
             }
             
-            PigLogger.getLogger().debug(logMessage);
+            log.debug(logMessage);
         }
     
         return resourceLocation;
@@ -344,7 +352,7 @@
         stream.println("Logical Plan:");
         LogicalPlan lp = aliases.get(alias);
         if (lp == null) {
-            PigLogger.getLogger().error("Invalid alias: " + alias);
+            log.error("Invalid alias: " + alias);
             stream.println("Invalid alias: " + alias);
             throw new IOException("Invalid alias: " + alias);
         }
@@ -360,7 +368,7 @@
             pp.explain(stream);
         }
         catch (ExecException e) {
-            PigLogger.getLogger().error("Failed to compile to physical plan: " + alias);
+            log.error("Failed to compile to physical plan: " + alias);
             stream.println("Failed to compile the logical plan for " + alias + " into a physical plan");
             IOException ioe = new IOException("Failed to compile to phyiscal plan: " + alias);
             ioe.initCause(e);

Modified: incubator/pig/trunk/src/org/apache/pig/StandAloneParser.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/StandAloneParser.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/StandAloneParser.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/StandAloneParser.java Tue Feb 12 13:22:40 2008
@@ -17,16 +17,21 @@
  */
 package org.apache.pig;
 
-import java.io.*;
-import java.util.*;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.Iterator;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.PigServer.ExecType;
-import org.apache.pig.impl.logicalLayer.LogicalPlan;
-
 import org.apache.pig.backend.executionengine.ExecException;
+import org.apache.pig.impl.logicalLayer.LogicalPlan;
 
 public class StandAloneParser {
     
+    private static final Log log = LogFactory.getLog(StandAloneParser.class);
+    
     static PigServer pig;
     
     public static void main(String args[]) throws IOException, ExecException {
@@ -68,7 +73,7 @@
                 else System.out.print("\n");
             }
         } catch (IOException e) {
-            System.err.println(e);
+            log.error(e);
         }
     }
 }

Modified: incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java Tue Feb 12 13:22:40 2008
@@ -8,42 +8,44 @@
 import java.net.SocketImplFactory;
 import java.net.UnknownHostException;
 import java.util.Collection;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
-import java.util.Enumeration;
 
-import org.apache.pig.impl.PigContext;
-import org.apache.pig.builtin.BinStorage;
-import org.apache.pig.impl.io.FileLocalizer;
-import org.apache.pig.impl.io.FileSpec;
-import org.apache.pig.backend.executionengine.*;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobSubmissionProtocol;
+import org.apache.hadoop.mapred.JobTracker;
+import org.apache.pig.backend.datastorage.DataStorage;
+import org.apache.pig.backend.executionengine.ExecException;
+import org.apache.pig.backend.executionengine.ExecJob;
+import org.apache.pig.backend.executionengine.ExecLogicalPlan;
+import org.apache.pig.backend.executionengine.ExecPhysicalOperator;
+import org.apache.pig.backend.executionengine.ExecPhysicalPlan;
+import org.apache.pig.backend.executionengine.ExecutionEngine;
 import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;
-import org.apache.pig.backend.hadoop.datastorage.HDataStorage;
 import org.apache.pig.backend.hadoop.datastorage.HConfiguration;
-import org.apache.pig.backend.datastorage.DataStorage;
-import org.apache.pig.impl.logicalLayer.OperatorKey;
-import org.apache.pig.impl.physicalLayer.PhysicalOperator;
-
+import org.apache.pig.backend.hadoop.datastorage.HDataStorage;
 import org.apache.pig.backend.hadoop.executionengine.mapreduceExec.MapReduceLauncher;
-import org.apache.pig.backend.local.executionengine.LocalResult;
+import org.apache.pig.builtin.BinStorage;
 import org.apache.pig.data.Tuple;
-
-import org.apache.log4j.Logger;
-
+import org.apache.pig.impl.PigContext;
+import org.apache.pig.impl.io.FileLocalizer;
+import org.apache.pig.impl.io.FileSpec;
+import org.apache.pig.impl.logicalLayer.OperatorKey;
 import org.apache.pig.shock.SSHSocketImplFactory;
-import org.apache.hadoop.mapred.JobTracker;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.mapred.JobSubmissionProtocol;
-import org.apache.hadoop.mapred.JobClient;
 
 
 public class HExecutionEngine implements ExecutionEngine {
     
+    private final Log log = LogFactory.getLog(getClass());
+    
     protected PigContext pigContext;
     
-    protected Logger logger;
     protected DataStorage ds;
     protected HConfiguration conf;
     
@@ -60,10 +62,8 @@
     protected Map<OperatorKey, MapRedResult> materializedResults;
     
     public HExecutionEngine(PigContext pigContext,
-                            Logger logger,
                             HConfiguration conf) {
         this.pigContext = pigContext;
-        this.logger = logger;
         this.conf = conf;
         this.logicalToPhysicalKeys = new HashMap<OperatorKey, OperatorKey>();
         this.physicalOpTable = new HashMap<OperatorKey, ExecPhysicalOperator>();
@@ -84,8 +84,8 @@
         return this.materializedResults;
     }
     
-    public HExecutionEngine(PigContext pigContext, Logger logger) {
-        this(pigContext, logger, new HConfiguration(new JobConf()));
+    public HExecutionEngine(PigContext pigContext) {
+        this(pigContext, new HConfiguration(new JobConf()));
     }
                             
     public Map<OperatorKey, ExecPhysicalOperator> getPhysicalOpTable() {
@@ -134,7 +134,7 @@
             }
         }
      
-        logger.info("Connecting to hadoop file system at: " + conf.get("fs.default.name"));
+        log.info("Connecting to hadoop file system at: " + conf.get("fs.default.name"));
 
         try {
             ds = new HDataStorage(conf);
@@ -143,7 +143,7 @@
             throw new ExecException("Failed to create DataStorage", e);
         }
             
-        logger.info("Connecting to map-reduce job tracker at: " + conf.get("mapred.job.tracker"));
+        log.info("Connecting to map-reduce job tracker at: " + conf.get("mapred.job.tracker"));
         
         try {
             jobTracker = (JobSubmissionProtocol) RPC.getProxy(JobSubmissionProtocol.class,
@@ -354,8 +354,8 @@
             
             InputStream is = p.getInputStream();
 
-            logger.info("Connecting to HOD...");
-            logger.debug("sending HOD command " + cmd.toString());
+            log.info("Connecting to HOD...");
+            log.debug("sending HOD command " + cmd.toString());
 
             StringBuffer sb = new StringBuffer();
             int c;
@@ -372,23 +372,23 @@
                     switch(current) {
                     case HDFSUI:
                         hdfsUI = sb.toString().trim();
-                        logger.info("HDFS Web UI: " + hdfsUI);
+                        log.info("HDFS Web UI: " + hdfsUI);
                         break;
                     case HDFS:
                         hdfs = sb.toString().trim();
-                        logger.info("HDFS: " + hdfs);
+                        log.info("HDFS: " + hdfs);
                         break;
                     case MAPREDUI:
                         mapredUI = sb.toString().trim();
-                        logger.info("JobTracker Web UI: " + mapredUI);
+                        log.info("JobTracker Web UI: " + mapredUI);
                         break;
                     case MAPRED:
                         mapred = sb.toString().trim();
-                        logger.info("JobTracker: " + mapred);
+                        log.info("JobTracker: " + mapred);
                         break;
                     case HADOOPCONF:
                         hadoopConf = sb.toString().trim();
-                        logger.info("HadoopConf: " + hadoopConf);
+                        log.info("HadoopConf: " + hadoopConf);
                         break;
                     }
                     current = ParsingState.NOTHING;

Modified: incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/POMapreduce.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/POMapreduce.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/POMapreduce.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/POMapreduce.java Tue Feb 12 13:22:40 2008
@@ -22,9 +22,10 @@
 import java.util.ArrayList;
 import java.util.Map;
 
-import org.apache.log4j.Logger;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.WritableComparator;
+import org.apache.log4j.Logger;
 import org.apache.pig.backend.executionengine.ExecPhysicalOperator;
 import org.apache.pig.backend.hadoop.executionengine.mapreduceExec.MapReduceLauncher;
 import org.apache.pig.data.Tuple;
@@ -37,10 +38,11 @@
 import org.apache.pig.impl.physicalLayer.PhysicalOperator;
 import org.apache.pig.impl.physicalLayer.POVisitor;
 import org.apache.pig.impl.util.ObjectSerializer;
-import org.apache.pig.impl.util.PigLogger;
 
 public class POMapreduce extends PhysicalOperator {
     private static final long serialVersionUID = 1L;
+
+	private final Log log = LogFactory.getLog(getClass());
     
     public ArrayList<EvalSpec> toMap             = new ArrayList<EvalSpec>();
     public EvalSpec     toCombine         = null;
@@ -200,7 +202,6 @@
     }
 
     void print() {
-        Logger log = PigLogger.getLogger();
         log.info("----- MapReduce Job -----");
         log.info("Input: " + inputFileSpecs);
         log.info("Map: " + toMap);

Modified: incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/MapReduceLauncher.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/MapReduceLauncher.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/MapReduceLauncher.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/MapReduceLauncher.java Tue Feb 12 13:22:40 2008
@@ -16,39 +16,36 @@
  */
 package org.apache.pig.backend.hadoop.executionengine.mapreduceExec;
 
+import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
-import java.io.File;
-import java.io.FileOutputStream;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.TaskReport;
 import org.apache.log4j.Logger;
-import org.apache.pig.impl.util.PigLogger;
+import org.apache.pig.backend.datastorage.DataStorageException;
+import org.apache.pig.backend.datastorage.ElementDescriptor;
+import org.apache.pig.backend.hadoop.datastorage.HDataStorage;
+import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
 import org.apache.pig.backend.hadoop.executionengine.POMapreduce;
 import org.apache.pig.builtin.PigStorage;
-import org.apache.pig.data.DataBag;
+import org.apache.pig.data.BagFactory;
 import org.apache.pig.data.IndexedTuple;
 import org.apache.pig.data.Tuple;
-import org.apache.pig.data.BagFactory;
 import org.apache.pig.impl.eval.EvalSpec;
 import org.apache.pig.impl.io.PigFile;
 import org.apache.pig.impl.util.JarManager;
 import org.apache.pig.impl.util.ObjectSerializer;
-import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
-import org.apache.pig.backend.hadoop.datastorage.HDataStorage;
-import org.apache.pig.backend.datastorage.ElementDescriptor;
-import org.apache.pig.backend.datastorage.DataStorageException;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.TaskReport;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.RunningJob; 
 
 
 /**
@@ -60,6 +57,8 @@
  */
 public class MapReduceLauncher {
     
+    private final Log log = LogFactory.getLog(getClass());
+    
     public static long totalHadoopTimeSpent = 0;
     public static int numMRJobs;
     public static int mrJobNumber;
@@ -115,7 +114,6 @@
      * @throws IOException
      */
     public boolean launchPig(POMapreduce pom) throws IOException {
-        Logger log = PigLogger.getLogger();
         JobConf conf = new JobConf(config);
         conf.setJobName(pom.pigContext.getJobName());
         boolean success = false;
@@ -273,9 +271,9 @@
             if (!success) {
                 // go find the error messages
                 getErrorMessages(jobClient.getMapTaskReports(status.getJobID()),
-                        "map", log);
+                        "map");
                 getErrorMessages(jobClient.getReduceTaskReports(status.getJobID()),
-                        "reduce", log);
+                        "reduce");
             }
             else {
                 long timeSpent = 0;
@@ -303,7 +301,7 @@
         return success;
     }
 
-private void getErrorMessages(TaskReport reports[], String type, Logger log)
+private void getErrorMessages(TaskReport reports[], String type)
 {
     for (int i = 0; i < reports.length; i++) {
         String msgs[] = reports[i].getDiagnostics();

Modified: incubator/pig/trunk/src/org/apache/pig/backend/local/executionengine/POStore.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/backend/local/executionengine/POStore.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/backend/local/executionengine/POStore.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/backend/local/executionengine/POStore.java Tue Feb 12 13:22:40 2008
@@ -21,6 +21,8 @@
 import java.io.PrintStream;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.StoreFunc;
 import org.apache.pig.backend.executionengine.ExecPhysicalOperator;
 import org.apache.pig.data.DataBag;
@@ -36,10 +38,10 @@
 
 
 public class POStore extends PhysicalOperator {
-    /**
-     * 
-     */
     private static final long serialVersionUID = 1L;
+    
+    private final Log log = LogFactory.getLog(getClass());
+    
     private PigFile f;
     private String funcSpec;
     boolean append = false;
@@ -119,7 +121,7 @@
     
     @Override
     public int getOutputType(){
-        System.err.println("No one should be asking my output type");
+        log.error("No one should be asking my output type");
         RuntimeException runtimeException = new RuntimeException();
         runtimeException.printStackTrace();
         throw runtimeException;

Modified: incubator/pig/trunk/src/org/apache/pig/data/DefaultDataBag.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/data/DefaultDataBag.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/data/DefaultDataBag.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/data/DefaultDataBag.java Tue Feb 12 13:22:40 2008
@@ -17,17 +17,18 @@
  */
 package org.apache.pig.data;
 
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.io.DataOutputStream;
-import java.io.DataInputStream;
 import java.io.BufferedInputStream;
-import java.io.FileInputStream;
-import java.io.IOException;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.EOFException;
+import java.io.FileInputStream;
 import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
 
-import org.apache.pig.impl.util.PigLogger;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 
 /**
@@ -37,6 +38,8 @@
  */
 public class DefaultDataBag extends DataBag {
 
+    private final Log log = LogFactory.getLog(getClass());
+
     public DefaultDataBag() {
         mContents = new ArrayList<Tuple>();
     }
@@ -71,7 +74,7 @@
             }  catch (IOException ioe) {
                 // Do not remove last file from spilled array. It was not
                 // added as File.createTmpFile threw an IOException
-                PigLogger.getLogger().error(
+                log.error(
                     "Unable to create tmp file to spill to disk", ioe);
                 return 0;
             }
@@ -88,7 +91,7 @@
                 // Remove the last file from the spilled array, since we failed to
                 // write to it.
                 mSpillFiles.remove(mSpillFiles.size() - 1);
-                PigLogger.getLogger().error(
+                log.error(
                     "Unable to spill contents to disk", ioe);
                 return 0;
             } finally {
@@ -96,7 +99,7 @@
                     try {
                         out.close();
                     } catch (IOException e) {
-                        PigLogger.getLogger().error("Error closing spill", e);
+                        log.error("Error closing spill", e);
                     }
                 }
             }
@@ -175,7 +178,7 @@
                 } catch (FileNotFoundException fnfe) {
                     // We can't find our own spill file?  That should never
                     // happen.
-                    PigLogger.getLogger().fatal(
+                    log.fatal(
                         "Unable to find our spill file", fnfe);
                     throw new RuntimeException(fnfe);
                 }
@@ -186,11 +189,11 @@
                     } catch (EOFException eof) {
                         // This should never happen, it means we
                         // didn't dump all of our tuples to disk.
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Ran out of tuples too soon.", eof);
                         throw new RuntimeException("Ran out of tuples to read prematurely.");
                     } catch (IOException ioe) {
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Unable to read our spill file", ioe);
                         throw new RuntimeException(ioe);
                     }
@@ -220,7 +223,7 @@
                     // Fall through to the next case where we find the
                     // next file, or go to memory
                 } catch (IOException ioe) {
-                    PigLogger.getLogger().fatal(
+                    log.fatal(
                         "Unable to read our spill file", ioe);
                     throw new RuntimeException(ioe);
                 }
@@ -249,7 +252,7 @@
             } catch (FileNotFoundException fnfe) {
                 // We can't find our own spill file?  That should never
                 // happen.
-                PigLogger.getLogger().fatal("Unable to find our spill file",
+                log.fatal("Unable to find our spill file",
                     fnfe);
                 throw new RuntimeException(fnfe);
             }

Modified: incubator/pig/trunk/src/org/apache/pig/data/DistinctDataBag.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/data/DistinctDataBag.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/data/DistinctDataBag.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/data/DistinctDataBag.java Tue Feb 12 13:22:40 2008
@@ -17,26 +17,25 @@
  */
 package org.apache.pig.data;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.ListIterator;
-import java.util.TreeSet;
-import java.util.Arrays;
 import java.io.BufferedInputStream;
-import java.io.DataOutputStream;
 import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.EOFException;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.ListIterator;
+import java.util.TreeSet;
 
-import org.apache.pig.impl.eval.EvalSpec;
-import org.apache.pig.impl.util.PigLogger;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 
 
@@ -51,6 +50,9 @@
  * found to be faster than storing it in a TreeSet.
  */
 public class DistinctDataBag extends DataBag {
+
+    private final Log log = LogFactory.getLog(getClass());
+
     public DistinctDataBag() {
         mContents = new HashSet<Tuple>();
     }
@@ -108,7 +110,7 @@
             }  catch (IOException ioe) {
                 // Do not remove last file from spilled array. It was not
                 // added as File.createTmpFile threw an IOException
-                PigLogger.getLogger().error(
+                log.error(
                     "Unable to create tmp file to spill to disk", ioe);
                 return 0;
             }
@@ -140,7 +142,7 @@
                 // Remove the last file from the spilled array, since we failed to
                 // write to it.
                 mSpillFiles.remove(mSpillFiles.size() - 1);
-                PigLogger.getLogger().error(
+                log.error(
                     "Unable to spill contents to disk", ioe);
                 return 0;
             } finally {
@@ -148,7 +150,7 @@
                     try {
                         out.close();
                     } catch (IOException e) {
-                        PigLogger.getLogger().error("Error closing spill", e);
+                        log.error("Error closing spill", e);
                     }
                 }
             }
@@ -256,7 +258,7 @@
                 } catch (FileNotFoundException fnfe) {
                     // We can't find our own spill file?  That should never
                     // happen.
-                    PigLogger.getLogger().fatal(
+                    log.fatal(
                         "Unable to find our spill file", fnfe);
                     throw new RuntimeException(fnfe);
                 }
@@ -272,7 +274,7 @@
                         // didn't dump all of our tuples to disk.
                         throw new RuntimeException("Ran out of tuples to read prematurely.");
                     } catch (IOException ioe) {
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Unable to read our spill file", ioe);
                         throw new RuntimeException(ioe);
                     }
@@ -317,7 +319,7 @@
                     } catch (FileNotFoundException fnfe) {
                         // We can't find our own spill file?  That should
                         // never happen.
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Unable to find out spill file.", fnfe);
                         throw new RuntimeException(fnfe);
                     }
@@ -392,7 +394,7 @@
                         mStreams.set(fileNum, null);
                         return;
                     } catch (IOException ioe) {
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Unable to read our spill file", ioe);
                         throw new RuntimeException(ioe);
                     }
@@ -459,7 +461,7 @@
                         } catch (FileNotFoundException fnfe) {
                             // We can't find our own spill file?  That should
                             // neer happen.
-                            PigLogger.getLogger().fatal(
+                            log.fatal(
                                 "Unable to find out spill file.", fnfe);
                             throw new RuntimeException(fnfe);
                         }
@@ -478,7 +480,7 @@
                         }
                         out.flush();
                     } catch (IOException ioe) {
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Unable to read our spill file", ioe);
                         throw new RuntimeException(ioe);
                     }

Modified: incubator/pig/trunk/src/org/apache/pig/data/SortedDataBag.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/data/SortedDataBag.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/data/SortedDataBag.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/data/SortedDataBag.java Tue Feb 12 13:22:40 2008
@@ -17,13 +17,6 @@
  */
 package org.apache.pig.data;
 
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.ListIterator;
-import java.util.PriorityQueue;
-import java.util.Iterator;
 import java.io.BufferedInputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
@@ -32,9 +25,17 @@
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.ListIterator;
+import java.util.PriorityQueue;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.impl.eval.EvalSpec;
-import org.apache.pig.impl.util.PigLogger;
 
 
 
@@ -49,6 +50,9 @@
  * cases where the user doesn't specify one.
  */
 public class SortedDataBag extends DataBag {
+
+    private final Log log = LogFactory.getLog(getClass());
+
     private Comparator<Tuple> mComp;
     private boolean mReadStarted = false;
 
@@ -108,7 +112,7 @@
             } catch (IOException ioe) {
                 // Do not remove last file from spilled array. It was not
                 // added as File.createTmpFile threw an IOException
-                PigLogger.getLogger().error(
+                log.error(
                     "Unable to create tmp file to spill to disk", ioe);
                 return 0;
             }
@@ -135,7 +139,7 @@
                 // Remove the last file from the spilled array, since we failed to
                 // write to it.
                 mSpillFiles.remove(mSpillFiles.size() - 1);
-                PigLogger.getLogger().error(
+                log.error(
                     "Unable to spill contents to disk", ioe);
                 return 0;
             } finally {
@@ -143,7 +147,7 @@
                     try {
                         out.close();
                     } catch (IOException e) {
-                        PigLogger.getLogger().error("Error closing spill", e);
+                        log.error("Error closing spill", e);
                     }
                 }
             }
@@ -254,7 +258,7 @@
                 } catch (FileNotFoundException fnfe) {
                     // We can't find our own spill file?  That should never
                     // happen.
-                    PigLogger.getLogger().fatal(
+                    log.fatal(
                         "Unable to find our spill file", fnfe);
                     throw new RuntimeException(fnfe);
                 }
@@ -268,11 +272,11 @@
                     } catch (EOFException eof) {
                         // This should never happen, it means we
                         // didn't dump all of our tuples to disk.
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Ran out of tuples too soon.", eof);
                         throw new RuntimeException("Ran out of tuples to read prematurely.");
                     } catch (IOException ioe) {
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Unable to read our spill file", ioe);
                         throw new RuntimeException(ioe);
                     }
@@ -319,7 +323,7 @@
                     } catch (FileNotFoundException fnfe) {
                         // We can't find our own spill file?  That should
                         // never happen.
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Unable to find our spill file", fnfe);
                         throw new RuntimeException(fnfe);
                     }
@@ -378,7 +382,7 @@
                     // this file.
                     mStreams.set(fileNum, null);
                 } catch (IOException ioe) {
-                    PigLogger.getLogger().fatal(
+                    log.fatal(
                         "Unable to read our spill file", ioe);
                     throw new RuntimeException(ioe);
                 }
@@ -442,7 +446,7 @@
                         } catch (FileNotFoundException fnfe) {
                             // We can't find our own spill file?  That should
                             // neer happen.
-                            PigLogger.getLogger().fatal(
+                            log.fatal(
                                 "Unable to find our spill file", fnfe);
                             throw new RuntimeException(fnfe);
                         }
@@ -461,7 +465,7 @@
                         }
                         out.flush();
                     } catch (IOException ioe) {
-                        PigLogger.getLogger().fatal(
+                        log.fatal(
                             "Unable to read our spill file", ioe);
                         throw new RuntimeException(ioe);
                     }

Modified: incubator/pig/trunk/src/org/apache/pig/data/TimestampedTuple.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/data/TimestampedTuple.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/data/TimestampedTuple.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/data/TimestampedTuple.java Tue Feb 12 13:22:40 2008
@@ -21,8 +21,13 @@
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 public class TimestampedTuple extends Tuple {
 
+    private final Log log = LogFactory.getLog(getClass());
+    
     protected double timestamp = 0;      // timestamp of this tuple
     protected boolean heartbeat = false;  // true iff this is a heartbeat (i.e. purpose is just to convey new timestamp; carries no data)
     
@@ -54,7 +59,7 @@
                 try{
                     timestamp = dateFormat.parse(splitString[i]).getTime()/1000.0;
                 }catch(ParseException e){
-                    System.err.println("Could not parse timestamp " + splitString[i]);
+                    log.error("Could not parse timestamp " + splitString[i]);
                 }
             }else{
                 fields.add(new DataAtom(splitString[i]));

Modified: incubator/pig/trunk/src/org/apache/pig/data/Tuple.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/data/Tuple.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/data/Tuple.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/data/Tuple.java Tue Feb 12 13:22:40 2008
@@ -24,12 +24,17 @@
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.WritableComparable;
 
 /**
  * an ordered list of Datums
  */
 public class Tuple extends Datum implements WritableComparable {
+    
+    private final Log log = LogFactory.getLog(getClass());
+    
     protected ArrayList<Datum> fields;
     static String              defaultDelimiter = "[,\t]";
     static String              NULL = "__PIG_NULL__";
@@ -156,7 +161,7 @@
         } else if (field instanceof Tuple) {
             Tuple t = (Tuple) field;
             if (t.arity() == 1) {
-                System.err.println("Warning: Asked for an atom field but found a tuple with one field.");
+                log.error("Warning: Asked for an atom field but found a tuple with one field.");
                 return t.getAtomField(0);
             }
         } else if (field instanceof DataBag) {

Modified: incubator/pig/trunk/src/org/apache/pig/impl/PigContext.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/PigContext.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/PigContext.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/PigContext.java Tue Feb 12 13:22:40 2008
@@ -17,9 +17,9 @@
  */
 package org.apache.pig.impl;
 
+import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.File;
 import java.io.InputStream;
 import java.io.Serializable;
 import java.lang.reflect.Constructor;
@@ -35,26 +35,30 @@
 import java.util.Properties;
 import java.util.Vector;
 
-import org.apache.hadoop.fs.FileSystem;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-
-import org.apache.pig.backend.datastorage.*;
-import org.apache.pig.backend.executionengine.*;
-import org.apache.pig.backend.local.executionengine.*;
-import org.apache.pig.backend.hadoop.datastorage.*;
-import org.apache.pig.backend.hadoop.executionengine.*;
-import org.apache.pig.backend.hadoop.executionengine.mapreduceExec.MapReduceLauncher;
-import org.apache.pig.backend.hadoop.executionengine.mapreduceExec.PigMapReduce;
-
-import org.apache.log4j.Logger;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.pig.Main;
 import org.apache.pig.PigServer.ExecType;
+import org.apache.pig.backend.datastorage.DataStorage;
+import org.apache.pig.backend.datastorage.DataStorageException;
+import org.apache.pig.backend.datastorage.ElementDescriptor;
+import org.apache.pig.backend.executionengine.ExecException;
+import org.apache.pig.backend.executionengine.ExecutionEngine;
+import org.apache.pig.backend.hadoop.datastorage.HDataStorage;
+import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
+import org.apache.pig.backend.hadoop.executionengine.mapreduceExec.MapReduceLauncher;
+import org.apache.pig.backend.hadoop.executionengine.mapreduceExec.PigMapReduce;
+import org.apache.pig.backend.local.executionengine.LocalExecutionEngine;
 import org.apache.pig.impl.logicalLayer.LogicalPlanBuilder;
 import org.apache.pig.impl.util.JarManager;
-import org.apache.pig.impl.util.PigLogger;
 
 public class PigContext implements Serializable, FunctionInstantiator {
     private static final long serialVersionUID = 1L;
+    
+    private transient final Log log = LogFactory.getLog(getClass());
+    
     private static final String JOB_NAME_PREFIX= "PigLatin";
     
     /* NOTE: we only serialize some of the stuff 
@@ -83,8 +87,6 @@
     
     // handle to the back-end
     transient private ExecutionEngine executionEngine;
-    
-    transient private Logger                mLogger;
    
     private String jobName = JOB_NAME_PREFIX;    // can be overwritten by users
   
@@ -103,8 +105,6 @@
         
     public PigContext(ExecType execType){
         this.execType = execType;
-        
-    mLogger = PigLogger.getLogger(); 
 
         initProperties();
         
@@ -151,10 +151,10 @@
         //Now set these as system properties only if they are not already defined.
         for (Object o: fileProperties.keySet()){
             String propertyName = (String)o;
-            mLogger.debug("Found system property " + propertyName + " in .pigrc"); 
+            log.debug("Found system property " + propertyName + " in .pigrc"); 
             if (System.getProperty(propertyName) == null){
                 System.setProperty(propertyName, fileProperties.getProperty(propertyName));
-                mLogger.debug("Setting system property " + propertyName);
+                log.debug("Setting system property " + propertyName);
             }
         }
     }    
@@ -175,7 +175,7 @@
 
             case MAPREDUCE:
             {
-                executionEngine = new HExecutionEngine (this, mLogger);
+                executionEngine = new HExecutionEngine (this);
 
                 executionEngine.init();
                 
@@ -213,7 +213,7 @@
             executionEngine.updateConfiguration(trackerLocation);
         }
         catch (ExecException e) {
-            mLogger.error("Failed to set tracker at: " + newLocation);
+            log.error("Failed to set tracker at: " + newLocation);
         }
     }
     

Modified: incubator/pig/trunk/src/org/apache/pig/impl/builtin/ShellBagEvalFunc.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/builtin/ShellBagEvalFunc.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/builtin/ShellBagEvalFunc.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/builtin/ShellBagEvalFunc.java Tue Feb 12 13:22:40 2008
@@ -161,7 +161,7 @@
             if (c == recordDelim) {
                 inRecord = false;
                 Tuple t = new Tuple(baos.toString(), fieldDelimString);
-                // System.err.println(Thread.currentThread().getName() + ": Adding tuple " + t + " to collector " + output);
+                // log.error(Thread.currentThread().getName() + ": Adding tuple " + t + " to collector " + output);
                 output.add(t);
                 baos = new ByteArrayOutputStream();
                 continue;

Modified: incubator/pig/trunk/src/org/apache/pig/impl/eval/GenerateSpec.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/eval/GenerateSpec.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/eval/GenerateSpec.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/eval/GenerateSpec.java Tue Feb 12 13:22:40 2008
@@ -267,7 +267,7 @@
         
         public void exec(){
             specs.get(driver).setupPipe(this).add(cpiInput);
-            //System.err.println(Thread.currentThread().getName() + ": Executing driver on " + cpiInput);
+            //log.error(Thread.currentThread().getName() + ": Executing driver on " + cpiInput);
             successor.markStale(false);
         }
         

Modified: incubator/pig/trunk/src/org/apache/pig/impl/eval/cond/FuncCond.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/eval/cond/FuncCond.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/eval/cond/FuncCond.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/eval/cond/FuncCond.java Tue Feb 12 13:22:40 2008
@@ -21,6 +21,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.FilterFunc;
 import org.apache.pig.data.Datum;
 import org.apache.pig.data.Tuple;
@@ -30,6 +32,8 @@
 
 
 public class FuncCond extends Cond {
+    
+    private final Log log = LogFactory.getLog(getClass());
 
     private static final long serialVersionUID = 1L;
     
@@ -72,7 +76,7 @@
             
             return func.exec((Tuple)d);
         } catch (IOException e) {
-            System.out.println("Warning: filter function " + funcName + " failed. Substituting default value \'false\'.");
+            log.error("Warning: filter function " + funcName + " failed. Substituting default value \'false\'.", e);
             return false;
         }
     }

Modified: incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOEval.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOEval.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOEval.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOEval.java Tue Feb 12 13:22:40 2008
@@ -53,13 +53,13 @@
     @Override
     public TupleSchema outputSchema() {
         if (schema == null) {
-            //System.out.println("LOEval input: " + inputs[0].outputSchema());
-            //System.out.println("LOEval spec: " + spec);
+            //log.info("LOEval input: " + inputs[0].outputSchema());
+            //log.info("LOEval spec: " + spec);
             schema =
                 (TupleSchema) spec.getOutputSchemaForPipe(opTable.get(getInputs().get(0)).
                                                           outputSchema());
 
-            //System.out.println("LOEval output: " + schema);
+            //log.info("LOEval output: " + schema);
         }
         schema.setAlias(alias);
         return schema;

Modified: incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java Tue Feb 12 13:22:40 2008
@@ -21,6 +21,8 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.LoadFunc;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.impl.io.FileSpec;
@@ -31,6 +33,8 @@
 
 public class LOLoad extends LogicalOperator {
     private static final long serialVersionUID = 1L;
+    
+    private final Log log = LogFactory.getLog(getClass());
 
     protected FileSpec inputFileSpec;
 
@@ -52,7 +56,7 @@
             while (cause != null
                    && cause.getClass().getName() !=
                    "java.lang.ClassNotFoundException") {
-                System.out.println("cause = " + cause.getClass().getName());
+                log.error("cause = " + cause.getClass().getName(), e);
                 cause = cause.getCause();
             } if (cause != null) {
                 throw new ParseException("Load function " +

Modified: incubator/pig/trunk/src/org/apache/pig/impl/util/JarManager.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/util/JarManager.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/util/JarManager.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/util/JarManager.java Tue Feb 12 13:22:40 2008
@@ -115,11 +115,11 @@
         Iterator<JarListEntry> it = jarList.iterator();
         while (it.hasNext()) {
             JarListEntry jarEntry = it.next();
-            // System.err.println("Adding " + jarEntry.jar + ":" + jarEntry.prefix);
+            // log.error("Adding " + jarEntry.jar + ":" + jarEntry.prefix);
             mergeJar(jarFile, jarEntry.jar, jarEntry.prefix, contents);
         }
         for (int i = 0; i < pigContext.extraJars.size(); i++) {
-            // System.err.println("Adding extra " + pigContext.extraJars.get(i));
+            // log.error("Adding extra " + pigContext.extraJars.get(i));
             mergeJar(jarFile, pigContext.extraJars.get(i), null, contents);
         }
         if (pigContext != null) {

Modified: incubator/pig/trunk/src/org/apache/pig/impl/util/PigLogger.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/util/PigLogger.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/util/PigLogger.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/util/PigLogger.java Tue Feb 12 13:22:40 2008
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.pig.impl.util;
-
-import org.apache.log4j.Logger;
-import org.apache.log4j.Level;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.PatternLayout;
-
-public class PigLogger 
-{
-
-private static Logger mLogger = null;
-private static boolean mHaveSetAppenders = false;
-
-/**
- * Get an instance of the underlying log4j logger.  This first makes sure
- * the PigLogger is initialized and then returns the underlying logger.
- */ 
-public static Logger getLogger()
-{
-    if (mLogger == null) {
-        mLogger = Logger.getLogger("org.apache.pig");
-    }
-    return mLogger;
-}
-
-/**
- * Set up a log appender for the junit tests, this way they cn write out log
- * messages.
- */
-public static void setAppenderForJunit()
-{
-    if (!mHaveSetAppenders) {
-        Logger log = getLogger();
-        log.setLevel(Level.INFO);
-        ConsoleAppender screen = new ConsoleAppender(new PatternLayout());
-        screen.setThreshold(Level.INFO);
-        screen.setTarget(ConsoleAppender.SYSTEM_ERR);
-        log.addAppender(screen);
-        mHaveSetAppenders = true;
-    }
-}
-
-
-}

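With PigLogger removed, code that previously called PigLogger.getLogger() now obtains a Log directly from LogFactory, and the test-only setAppenderForJunit() hook has no direct equivalent in this patch. One way tests could still get console output without any log4j configuration is commons-logging's built-in SimpleLog implementation; the class below is only a sketch of that option (the class name and properties chosen are assumptions, not something this commit adds).

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class LoggingSmokeTest {

        static {
            // Force the SimpleLog implementation (writes to System.err) so that
            // log output shows up even when no log4j configuration is present.
            // These properties must be set before LogFactory is first consulted.
            System.setProperty("org.apache.commons.logging.Log",
                               "org.apache.commons.logging.impl.SimpleLog");
            System.setProperty("org.apache.commons.logging.simplelog.defaultlog", "info");
        }

        private final Log log = LogFactory.getLog(getClass());

        public void run() {
            log.info("commons-logging is wired up for the tests");
        }
    }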
Modified: incubator/pig/trunk/src/org/apache/pig/impl/util/SpillableMemoryManager.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/impl/util/SpillableMemoryManager.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/impl/util/SpillableMemoryManager.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/impl/util/SpillableMemoryManager.java Tue Feb 12 13:22:40 2008
@@ -5,10 +5,10 @@
 import java.lang.management.MemoryPoolMXBean;
 import java.lang.management.MemoryType;
 import java.lang.ref.WeakReference;
-import java.util.LinkedList;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
+import java.util.LinkedList;
 import java.util.List;
 
 import javax.management.Notification;
@@ -16,7 +16,8 @@
 import javax.management.NotificationListener;
 import javax.management.openmbean.CompositeData;
 
-import org.apache.log4j.Logger;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 /**
  * This class Tracks the tenured pool and a list of Spillable objects. When memory gets low, this
@@ -28,6 +29,9 @@
  *
  */
 public class SpillableMemoryManager implements NotificationListener {
+    
+    private final Log log = LogFactory.getLog(getClass());
+    
     List<WeakReference<Spillable>> spillables = new LinkedList<WeakReference<Spillable>>();
     
     public SpillableMemoryManager() {
@@ -36,7 +40,7 @@
         MemoryPoolMXBean biggestHeap = null;
         long biggestSize = 0;
         for (MemoryPoolMXBean b: mpbeans) {
-            PigLogger.getLogger().debug("Found heap (" + b.getName() +
+            log.debug("Found heap (" + b.getName() +
                 ") of type " + b.getType());
             if (b.getType() == MemoryType.HEAP) {
                 /* Here we are making the leap of faith that the biggest
@@ -52,7 +56,7 @@
         if (biggestHeap == null) {
             throw new RuntimeException("Couldn't find heap");
         }
-        PigLogger.getLogger().debug("Selected heap to monitor (" +
+        log.debug("Selected heap to monitor (" +
             biggestHeap.getName() + ")");
         /* We set the threshold to be 50% of tenured since that is where
          * the GC starts to dominate CPU time according to Sun doc */
@@ -62,10 +66,10 @@
     public void handleNotification(Notification n, Object o) {
         CompositeData cd = (CompositeData) n.getUserData();
         MemoryNotificationInfo info = MemoryNotificationInfo.from(cd);
-        PigLogger.getLogger().info("low memory handler called " + info.getUsage());
+        log.info("low memory handler called " + info.getUsage());
         long toFree = info.getUsage().getUsed() - (long)(info.getUsage().getMax()*.5);
         if (toFree < 0) {
-            PigLogger.getLogger().debug("low memory handler returning " + 
+            log.debug("low memory handler returning " + 
                 "because there is nothing to free");
             return;
         }

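For context, the memory-threshold machinery SpillableMemoryManager relies on comes straight from java.lang.management: select a heap pool, set a usage threshold on it, and register a NotificationListener with the MemoryMXBean. The standalone sketch below shows that wiring under the same 50% threshold the constructor uses; the class name and the spill comment are illustrative, and it assumes the selected pool supports usage thresholds and reports a defined maximum.

    import java.lang.management.ManagementFactory;
    import java.lang.management.MemoryNotificationInfo;
    import java.lang.management.MemoryPoolMXBean;
    import java.lang.management.MemoryType;

    import javax.management.Notification;
    import javax.management.NotificationEmitter;
    import javax.management.NotificationListener;

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class LowMemoryWatcher implements NotificationListener {

        private final Log log = LogFactory.getLog(getClass());

        public LowMemoryWatcher() {
            // Pick the largest heap pool that supports a usage threshold; on a
            // typical JVM this is the tenured generation.
            MemoryPoolMXBean biggest = null;
            for (MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans()) {
                if (pool.getType() == MemoryType.HEAP
                        && pool.isUsageThresholdSupported()
                        && (biggest == null
                            || pool.getUsage().getMax() > biggest.getUsage().getMax())) {
                    biggest = pool;
                }
            }
            if (biggest == null) {
                throw new RuntimeException("Couldn't find a heap pool to monitor");
            }
            // Ask to be notified once usage crosses half of the pool's maximum.
            biggest.setUsageThreshold((long) (biggest.getUsage().getMax() * 0.5));
            // The MemoryMXBean is the source of the threshold notifications.
            NotificationEmitter emitter =
                (NotificationEmitter) ManagementFactory.getMemoryMXBean();
            emitter.addNotificationListener(this, null, null);
        }

        public void handleNotification(Notification n, Object handback) {
            if (MemoryNotificationInfo.MEMORY_THRESHOLD_EXCEEDED.equals(n.getType())) {
                log.info("low memory notification: " + n.getMessage());
                // A real manager would now ask its registered Spillable objects
                // to spill their contents to disk.
            }
        }
    }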
Modified: incubator/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java Tue Feb 12 13:22:40 2008
@@ -19,6 +19,8 @@
 
 import java.io.BufferedReader;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.PigServer;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.tools.grunt.GruntParser;
@@ -27,6 +29,8 @@
 
 public class Grunt 
 {
+    private final Log log = LogFactory.getLog(getClass());
+    
     BufferedReader in;
     PigServer pig;
     GruntParser parser;    
@@ -53,7 +57,7 @@
         parser.setInteractive(false);
         parser.parseStopOnError();
         } catch (Throwable e) {
-            System.err.println(e.getMessage());
+            log.error(e.getMessage());
     }
     
     }

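Grunt keeps log4j as the default logging layer, and commons-logging discovers log4j automatically when it is on the classpath, so messages logged through the new Log fields still land wherever log4j is pointed. A minimal log4j bootstrap along the lines of the deleted PigLogger could look like the sketch below; the class and method names are illustrative only and the pattern string is an assumption.

    import org.apache.log4j.ConsoleAppender;
    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;
    import org.apache.log4j.PatternLayout;

    public class GruntLogging {

        /**
         * Attach a console appender to the org.apache.pig logger hierarchy so
         * that commons-logging calls made from PigServer on down surface on
         * stderr while running interactively.
         */
        public static void bootstrap() {
            Logger pigLogger = Logger.getLogger("org.apache.pig");
            pigLogger.setLevel(Level.INFO);
            // Passing the target to the constructor avoids a separate
            // activateOptions() call.
            pigLogger.addAppender(new ConsoleAppender(
                    new PatternLayout("%d [%t] %-5p %c - %m%n"),
                    ConsoleAppender.SYSTEM_ERR));
        }
    }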
Modified: incubator/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java (original)
+++ incubator/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java Tue Feb 12 13:22:40 2008
@@ -5,30 +5,27 @@
 import java.io.Reader;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Properties;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileUtil;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.RunningJob;
-
 import org.apache.pig.PigServer;
-import org.apache.pig.data.Tuple;
-import org.apache.pig.tools.pigscript.parser.ParseException;
-import org.apache.pig.tools.pigscript.parser.PigScriptParser;
-import org.apache.pig.tools.pigscript.parser.PigScriptParserTokenManager;
+import org.apache.pig.backend.datastorage.ContainerDescriptor;
 import org.apache.pig.backend.datastorage.DataStorage;
 import org.apache.pig.backend.datastorage.DataStorageException;
 import org.apache.pig.backend.datastorage.ElementDescriptor;
-import org.apache.pig.backend.datastorage.ContainerDescriptor;
 import org.apache.pig.backend.executionengine.ExecutionEngine;
 import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
+import org.apache.pig.data.Tuple;
+import org.apache.pig.tools.pigscript.parser.ParseException;
+import org.apache.pig.tools.pigscript.parser.PigScriptParser;
+import org.apache.pig.tools.pigscript.parser.PigScriptParserTokenManager;
 
 public class GruntParser extends PigScriptParser {
 
+    private final Log log = LogFactory.getLog(getClass());
 
     public GruntParser(Reader stream) {
         super(stream);
@@ -73,7 +70,7 @@
             }
             catch(Exception e)
             {
-                System.err.println(e.getMessage());
+                log.error(e.getMessage());
             }
     }
 
@@ -253,7 +250,7 @@
             else
             {    
                 job.killJob();
-                System.err.println("kill submited.");
+                log.error("kill submited.");
             }
         }
     }
@@ -319,20 +316,20 @@
 
     protected void printHelp() 
     {
-        System.err.println("Commands:");
-        System.err.println("<pig latin statement>;");
-        System.err.println("store <alias> into <filename> [using <functionSpec>]");
-        System.err.println("dump <alias>");
-        System.err.println("describe <alias>");
-        System.err.println("kill <job_id>");
-        System.err.println("ls <path>\r\ndu <path>\r\nmv <src> <dst>\r\ncp <src> <dst>\r\nrm <src>");
-        System.err.println("copyFromLocal <localsrc> <dst>\r\ncd <dir>\r\npwd");
-        System.err.println("cat <src>\r\ncopyToLocal <src> <localdst>\r\nmkdir <path>");
-        System.err.println("cd <path>");
-        System.err.println("define <functionAlias> <functionSpec>");
-        System.err.println("register <udfJar>");
-        System.err.println("set key value");
-        System.err.println("quit");
+        System.out.println("Commands:");
+        System.out.println("<pig latin statement>;");
+        System.out.println("store <alias> into <filename> [using <functionSpec>]");
+        System.out.println("dump <alias>");
+        System.out.println("describe <alias>");
+        System.out.println("kill <job_id>");
+        System.out.println("ls <path>\r\ndu <path>\r\nmv <src> <dst>\r\ncp <src> <dst>\r\nrm <src>");
+        System.out.println("copyFromLocal <localsrc> <dst>\r\ncd <dir>\r\npwd");
+        System.out.println("cat <src>\r\ncopyToLocal <src> <localdst>\r\nmkdir <path>");
+        System.out.println("cd <path>");
+        System.out.println("define <functionAlias> <functionSpec>");
+        System.out.println("register <udfJar>");
+        System.out.println("set key value");
+        System.out.println("quit");
     }
 
     protected void processMove(String src, String dst) throws IOException

Modified: incubator/pig/trunk/test/org/apache/pig/test/TestCompressedFiles.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/test/org/apache/pig/test/TestCompressedFiles.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/test/org/apache/pig/test/TestCompressedFiles.java (original)
+++ incubator/pig/trunk/test/org/apache/pig/test/TestCompressedFiles.java Tue Feb 12 13:22:40 2008
@@ -26,11 +26,16 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.DIFF;
 import junit.framework.TestCase;
 
 public class TestCompressedFiles extends TestCase {
+    
+    private final Log log = LogFactory.getLog(getClass());
+
     File datFile;
     File gzFile;
     @Override
@@ -72,7 +77,7 @@
         boolean success = true;
         while(it.hasNext()) {
             success = false;
-            System.out.println(it.next());
+            log.info(it.next());
         }
         assertTrue(success);
     }

Modified: incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpNumeric.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpNumeric.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpNumeric.java (original)
+++ incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpNumeric.java Tue Feb 12 13:22:40 2008
@@ -24,6 +24,8 @@
 
 import org.junit.Test;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.PigStorage;
 import org.apache.pig.data.Tuple;
@@ -32,7 +34,8 @@
 
 public class TestFilterOpNumeric extends TestCase {
 
-    
+    private final Log log = LogFactory.getLog(getClass());
+
     private static int LOOP_COUNT = 1024;
     private String initString = "mapreduce";
     
@@ -51,7 +54,7 @@
         ps.close();
         pig.registerQuery("A=load 'file:" + tmpFile + "' using "+PigStorage.class.getName() +"(':');");
         String query = "A = filter A by $0 == $1;";
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -82,7 +85,7 @@
         ps.close();
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 != $1;";
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -110,7 +113,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 > $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -133,7 +136,7 @@
         ps.close();
         pig.registerQuery("A=load 'file:" + tmpFile + "';");
         String query = "A = foreach A generate ($1 >= '"+ LOOP_COUNT+"'-'10'?'1':'0');";
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -162,7 +165,7 @@
         ps.close();
         pig.registerQuery("A=load 'file:" + tmpFile + "';");
         String query = "A = foreach A generate ($0 < '10'?($1 >= '5' ? '2': '1') : '0');";
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -195,7 +198,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 < $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -227,7 +230,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 >= $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -257,7 +260,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 <= $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();

Modified: incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpString.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpString.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpString.java (original)
+++ incubator/pig/trunk/test/org/apache/pig/test/TestFilterOpString.java Tue Feb 12 13:22:40 2008
@@ -24,6 +24,8 @@
 
 import org.junit.Test;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.PigStorage;
 import org.apache.pig.data.Tuple;
@@ -32,6 +34,8 @@
 
 public class TestFilterOpString extends TestCase {
 
+    private final Log log = LogFactory.getLog(getClass());
+
     private static int LOOP_COUNT = 1024;    
     private String initString = "mapreduce";
     
@@ -51,7 +55,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 eq $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -79,7 +83,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 neq $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -107,7 +111,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 gt $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -140,7 +144,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 gte $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -169,7 +173,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 lt $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -200,7 +204,7 @@
         pig.registerQuery("A=load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':');");
         String query = "A = filter A by $0 lte $1;";
 
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();

Modified: incubator/pig/trunk/test/org/apache/pig/test/TestInfixArithmetic.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/test/org/apache/pig/test/TestInfixArithmetic.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/test/org/apache/pig/test/TestInfixArithmetic.java (original)
+++ incubator/pig/trunk/test/org/apache/pig/test/TestInfixArithmetic.java Tue Feb 12 13:22:40 2008
@@ -24,6 +24,8 @@
 
 import org.junit.Test;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.PigStorage;
 import org.apache.pig.data.Tuple;
@@ -32,6 +34,8 @@
 
 public class TestInfixArithmetic extends TestCase {
 
+    private final Log log = LogFactory.getLog(getClass());
+
     private static int LOOP_COUNT = 1024;    
     private String initString = "mapreduce";
     
@@ -45,7 +49,7 @@
         }
         ps.close();
         String query = "A = foreach (load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':')) generate $0, $0 + $1, $1;";
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -67,7 +71,7 @@
         }
         ps.close();
         String query = "A = foreach (load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':')) generate $0, $0 - $1, $1 ;";
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -88,7 +92,7 @@
         }
         ps.close();
         String query = "A = foreach (load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':')) generate $0, $0 * $1, $1 ;";
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();
@@ -110,7 +114,7 @@
         }
         ps.close();
         String query = "A =  foreach (load 'file:" + tmpFile + "' using " + PigStorage.class.getName() + "(':')) generate $0, $0 / $1, $1;";
-        System.out.println(query);
+        log.info(query);
         pig.registerQuery(query);
         Iterator it = pig.openIterator("A");
         tmpFile.delete();

Modified: incubator/pig/trunk/test/org/apache/pig/test/TestOrderBy.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/test/org/apache/pig/test/TestOrderBy.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/test/org/apache/pig/test/TestOrderBy.java (original)
+++ incubator/pig/trunk/test/org/apache/pig/test/TestOrderBy.java Tue Feb 12 13:22:40 2008
@@ -67,7 +67,7 @@
         for(int i = 0; i < DATALEN; i++) {
             Tuple t = (Tuple)it.next();
             int value = t.getAtomField(1).numval().intValue();
-//            System.out.println("" + i + "," + DATA[0][i] + "," + DATA[1][i] + "," + value);
+//            log.info("" + i + "," + DATA[0][i] + "," + DATA[1][i] + "," + value);
             assertEquals(Integer.parseInt(DATA[col][i]), value);
         }
         assertFalse(it.hasNext());

Modified: incubator/pig/trunk/test/org/apache/pig/test/TestPi.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/test/org/apache/pig/test/TestPi.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/test/org/apache/pig/test/TestPi.java (original)
+++ incubator/pig/trunk/test/org/apache/pig/test/TestPi.java Tue Feb 12 13:22:40 2008
@@ -37,12 +37,16 @@
 import java.util.Iterator;
 import java.util.Random;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 /*
  * Testcase aimed at testing pig with large file sizes and filter and group functions
 */
 public class TestPi extends TestCase {
 	
+    private final Log log = LogFactory.getLog(getClass());
+
 	File datFile;
 	private long defaultBlockSize = (new Configuration()).getLong("dfs.block.size", 0);
 	
@@ -61,9 +65,9 @@
 	@Before
     protected void setUp() throws Exception{
 
-        System.out.println("Generating test data...");
-        System.out.println("Default block size = " + defaultBlockSize);
-        System.out.println("Total no. of iterations to run for test data = " + total);
+        log.info("Generating test data...");
+        log.info("Default block size = " + defaultBlockSize);
+        log.info("Total no. of iterations to run for test data = " + total);
         datFile = File.createTempFile("PiTest", ".dat");
         
         FileOutputStream dat = new FileOutputStream(datFile);
@@ -140,8 +144,8 @@
 		int totalPoints = Total.next().getAtomField(0).numval().intValue();
 		int inCirclePoints = InCircle.next().getAtomField(0).numval().intValue();
 
-		System.out.println("Value of PI = " + 4 * (double)inCircle / (double)total);
-		System.out.println("Value of PI (From Test data) = " + 4 * (double)inCirclePoints / (double)totalPoints);
+		log.info("Value of PI = " + 4 * (double)inCircle / (double)total);
+		log.info("Value of PI (From Test data) = " + 4 * (double)inCirclePoints / (double)totalPoints);
 		
 		
 		Iterator <Tuple> lengthTest = pig.openIterator("D");

Modified: incubator/pig/trunk/test/org/apache/pig/test/TestPigFile.java
URL: http://svn.apache.org/viewvc/incubator/pig/trunk/test/org/apache/pig/test/TestPigFile.java?rev=627115&r1=627114&r2=627115&view=diff
==============================================================================
--- incubator/pig/trunk/test/org/apache/pig/test/TestPigFile.java (original)
+++ incubator/pig/trunk/test/org/apache/pig/test/TestPigFile.java Tue Feb 12 13:22:40 2008
@@ -30,6 +30,8 @@
 import org.junit.Before;
 import org.junit.Test;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.BinStorage;
 import org.apache.pig.builtin.PigStorage;
@@ -40,6 +42,8 @@
 
 public class TestPigFile extends TestCase {
 
+    private final Log log = LogFactory.getLog(getClass());
+
     DataBag bag          = BagFactory.getInstance().newDefaultBag();
     Random rand = new Random();
     
@@ -47,7 +51,7 @@
 	@Before
     protected void setUp() throws Exception {
 
-        System.out.println("Generating PigFile test data...");
+        log.info("Generating PigFile test data...");
 
         Random rand = new Random();
 
@@ -62,7 +66,7 @@
             t.setField(j, r);
 
         }
-        System.out.println("Done.");
+        log.info("Done.");
     }
 
     @Override
@@ -74,16 +78,16 @@
     public void testStoreAndLoadText() throws IOException {
         PigContext pigContext = new PigContext(ExecType.LOCAL);
         
-        System.out.println("Running Store...");
+        log.info("Running Store...");
         String initialdata = File.createTempFile("pig-tmp", "").getAbsolutePath();
         PigFile store = new PigFile(initialdata);
         store.store(bag, new PigStorage(), pigContext);
-        System.out.println("Done.");
+        log.info("Done.");
 
-        System.out.println("Running Load...");
+        log.info("Running Load...");
         PigFile load = new PigFile(initialdata);
         DataBag loaded = load.load(new PigStorage(), pigContext);
-        System.out.println("Done.");
+        log.info("Done.");
 
         assertTrue(bag.size() == loaded.size());
 
@@ -147,22 +151,22 @@
 
     @Test
     public void testStoreAndLoadBin() throws IOException {
-        System.out.println("Generating Data ...");
+        log.info("Generating Data ...");
         bag = getRandomBag(5000,0);
-        System.out.println("Done.");
+        log.info("Done.");
         
         PigContext pigContext = new PigContext(ExecType.LOCAL);
         
-        System.out.println("Running Store...");
+        log.info("Running Store...");
         String storeFile = File.createTempFile("pig-tmp", "").getAbsolutePath();
         PigFile store = new PigFile(storeFile);
         store.store(bag, new BinStorage(), pigContext);
-        System.out.println("Done.");
+        log.info("Done.");
 
-        System.out.println("Running Load...");
+        log.info("Running Load...");
         PigFile load = new PigFile(storeFile);
         DataBag loaded = load.load(new BinStorage(), pigContext);
-        System.out.println("Done.");
+        log.info("Done.");
 
         assertTrue(bag.size() == loaded.size());