Posted to commits@pig.apache.org by da...@apache.org on 2009/08/03 20:59:13 UTC

svn commit: r800494 - in /hadoop/pig/trunk: ./ conf/ src/org/apache/pig/ src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/ src/org/apache/pig/impl/

Author: daijy
Date: Mon Aug  3 18:59:13 2009
New Revision: 800494

URL: http://svn.apache.org/viewvc?rev=800494&view=rev
Log:
PIG-882: log level not propagated to loggers

Modified:
    hadoop/pig/trunk/CHANGES.txt
    hadoop/pig/trunk/conf/log4j.properties
    hadoop/pig/trunk/src/org/apache/pig/Main.java
    hadoop/pig/trunk/src/org/apache/pig/PigServer.java
    hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java
    hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    hadoop/pig/trunk/src/org/apache/pig/impl/PigContext.java

Modified: hadoop/pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=800494&r1=800493&r2=800494&view=diff
==============================================================================
--- hadoop/pig/trunk/CHANGES.txt (original)
+++ hadoop/pig/trunk/CHANGES.txt Mon Aug  3 18:59:13 2009
@@ -44,6 +44,8 @@
 
 BUG FIXES
 
+    PIG-882: log level not propagated to loggers (daijy)
+
     PIG-880: Order by is broken with complex fields (sms)
     
     PIG-773: Empty complex constants (empty bag, empty tuple and empty map)

Modified: hadoop/pig/trunk/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/conf/log4j.properties?rev=800494&r1=800493&r2=800494&view=diff
==============================================================================
--- hadoop/pig/trunk/conf/log4j.properties (original)
+++ hadoop/pig/trunk/conf/log4j.properties Mon Aug  3 18:59:13 2009
@@ -1,5 +1,5 @@
 # ***** Set root logger level to DEBUG and its only appender to A.
-log4j.rootLogger=info, A
+log4j.logger.org.apache.pig=info, A
 
 # ***** A is set to be a ConsoleAppender.
 log4j.appender.A=org.apache.log4j.ConsoleAppender
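
The conf change above moves the level from log4j.rootLogger to the "org.apache.pig" logger; log4j's logger hierarchy is what carries that level down to every Pig class. A minimal standalone sketch of that inheritance (not part of the commit; the demo class and appender setup are illustrative):

    import org.apache.log4j.ConsoleAppender;
    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;
    import org.apache.log4j.PatternLayout;

    public class LoggerHierarchyDemo {
        public static void main(String[] args) {
            // Configure only the parent "org.apache.pig" logger, as the new
            // conf/log4j.properties does.
            Logger pig = Logger.getLogger("org.apache.pig");
            pig.addAppender(new ConsoleAppender(new PatternLayout("%d [%t] %-5p %c - %m%n")));
            pig.setLevel(Level.DEBUG);

            // A child logger with no level of its own inherits DEBUG from the parent.
            Logger child = Logger.getLogger("org.apache.pig.Main");
            System.out.println(child.getEffectiveLevel());  // prints DEBUG
            child.debug("visible because the parent's level propagates down");
        }
    }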

Modified: hadoop/pig/trunk/src/org/apache/pig/Main.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/Main.java?rev=800494&r1=800493&r2=800494&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/Main.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/Main.java Mon Aug  3 18:59:13 2009
@@ -30,6 +30,7 @@
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
 import org.apache.log4j.PropertyConfigurator;
 import org.apache.pig.ExecType;
 import org.apache.pig.impl.PigContext;
@@ -260,10 +261,11 @@
                      }
             }
         }
-        // configure logging
-        configureLog4J(properties);
         // create the context with the parameter
         PigContext pigContext = new PigContext(execType, properties);
+
+        // configure logging
+        configureLog4J(properties, pigContext);
         
         if(logFileName == null && !userSpecifiedLog) {
             logFileName = validateLogFile(null, null);
@@ -445,7 +447,7 @@
 }
 
 //TODO jz: log4j.properties should be used instead
-private static void configureLog4J(Properties properties) {
+private static void configureLog4J(Properties properties, PigContext pigContext) {
     // TODO Add a file appender for the logs
     // TODO Need to create a property in the properties file for it.
     // sgroschupf, 25Feb2008: this method will be obsolete with PIG-115.
@@ -460,35 +462,43 @@
         logLevel = Level.toLevel(logLevelString, Level.INFO);
     }
     
+    Properties props = new Properties();
     if (log4jconf != null) {
-         PropertyConfigurator.configure(log4jconf);
-     } else if (!brief ) {
-         // non-brief logging - timestamps
-         Properties props = new Properties();
-         props.setProperty("log4j.rootLogger", "INFO, PIGCONSOLE");
-         props.setProperty("log4j.appender.PIGCONSOLE",
-                           "org.apache.log4j.ConsoleAppender");
-         props.setProperty("log4j.appender.PIGCONSOLE.layout",
-                           "org.apache.log4j.PatternLayout");
-         props.setProperty("log4j.appender.PIGCONSOLE.layout.ConversionPattern",
-                           "%d [%t] %-5p %c - %m%n");
-         props.setProperty("log4j.appender.PIGCONSOLE.target",
-         "System.err");
-         PropertyConfigurator.configure(props);
-     } else {
-         // brief logging - no timestamps
-         Properties props = new Properties();
-         props.setProperty("log4j.rootLogger", "INFO, PIGCONSOLE");
-         props.setProperty("log4j.appender.PIGCONSOLE",
-                           "org.apache.log4j.ConsoleAppender");
-         props.setProperty("log4j.appender.PIGCONSOLE.layout",
-                           "org.apache.log4j.PatternLayout");
-         props.setProperty("log4j.appender.PIGCONSOLE.layout.ConversionPattern",
-                           "%m%n");
-         props.setProperty("log4j.appender.PIGCONSOLE.target",
-         "System.err");
-         PropertyConfigurator.configure(props);
-     }
+        try {
+            FileReader propertyReader = new FileReader(log4jconf);
+            props.load(propertyReader);
+        }
+        catch (IOException e)
+        {
+            System.err.println("Warn: Cannot open log4j properties file, use default");
+        }
+    }
+    if (props.size() == 0) {
+        props.setProperty("log4j.logger.org.apache.pig", logLevel.toString()+", PIGCONSOLE");
+        props.setProperty("log4j.appender.PIGCONSOLE",
+                "org.apache.log4j.ConsoleAppender");
+        props.setProperty("log4j.appender.PIGCONSOLE.layout",
+                "org.apache.log4j.PatternLayout");
+        props.setProperty("log4j.appender.PIGCONSOLE.target", "System.err");
+
+        if (!brief) {
+            // non-brief logging - timestamps
+            props.setProperty(
+                    "log4j.appender.PIGCONSOLE.layout.ConversionPattern",
+                    "%d [%t] %-5p %c - %m%n");
+        } else {
+            // brief logging - no timestamps
+            props.setProperty(
+                    "log4j.appender.PIGCONSOLE.layout.ConversionPattern",
+                    "%m%n");
+        }
+    }
+    PropertyConfigurator.configure(props);
+    logLevel = Logger.getLogger("org.apache.pig").getLevel();
+    Properties backendProps = pigContext.getLog4jProperties();
+    backendProps.setProperty("log4j.logger.org.apache.pig.level", logLevel.toString());
+    pigContext.setLog4jProperties(backendProps);
+    pigContext.setDefaultLogLevel(logLevel);
 }
  
 // returns the stream of final pig script to be passed to Grunt
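
Taken together, the Main.java changes make the front end (a) prefer a user-supplied log4j properties file, (b) otherwise build a default console configuration scoped to "org.apache.pig" at the requested level, and (c) read back the level that actually took effect and record it on PigContext for the backend. A condensed sketch of that flow (the wrapper class and method names are illustrative, not Pig API):

    import java.io.FileReader;
    import java.io.IOException;
    import java.util.Properties;

    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    public class FrontEndLoggingSketch {
        static Properties buildLog4jProperties(String log4jconf, Level requested, boolean brief) {
            Properties props = new Properties();
            if (log4jconf != null) {
                try {
                    props.load(new FileReader(log4jconf));  // user-supplied file wins
                } catch (IOException e) {
                    System.err.println("Warn: Cannot open log4j properties file, use default");
                }
            }
            if (props.isEmpty()) {
                // Default: console appender, level scoped to org.apache.pig only.
                props.setProperty("log4j.logger.org.apache.pig", requested + ", PIGCONSOLE");
                props.setProperty("log4j.appender.PIGCONSOLE", "org.apache.log4j.ConsoleAppender");
                props.setProperty("log4j.appender.PIGCONSOLE.layout", "org.apache.log4j.PatternLayout");
                props.setProperty("log4j.appender.PIGCONSOLE.target", "System.err");
                props.setProperty("log4j.appender.PIGCONSOLE.layout.ConversionPattern",
                        brief ? "%m%n" : "%d [%t] %-5p %c - %m%n");
            }
            return props;
        }

        public static void main(String[] args) {
            PropertyConfigurator.configure(buildLog4jProperties(null, Level.INFO, false));
            // The effective level is read back; in the real code it is stored on
            // PigContext so backend tasks can be configured the same way.
            Level effective = Logger.getLogger("org.apache.pig").getLevel();
            System.out.println("effective Pig log level: " + effective);
        }
    }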

Modified: hadoop/pig/trunk/src/org/apache/pig/PigServer.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/PigServer.java?rev=800494&r1=800493&r2=800494&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/PigServer.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/PigServer.java Mon Aug  3 18:59:13 2009
@@ -36,6 +36,9 @@
 import java.util.Properties;
 import java.util.Set;
 import java.util.Stack;
+
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
 import org.apache.pig.impl.plan.PlanException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -178,11 +181,13 @@
     }
     
     public void debugOn() {
-        pigContext.debug = true;
+        Logger.getLogger("org.apache.pig").setLevel(Level.DEBUG);
+        pigContext.getLog4jProperties().setProperty("log4j.logger.org.apache.pig", Level.DEBUG.toString());
     }
     
     public void debugOff() {
-        pigContext.debug = false;
+        Logger.getLogger("org.apache.pig").setLevel(pigContext.getDefaultLogLevel());
+        pigContext.getLog4jProperties().setProperty("log4j.logger.org.apache.pig", pigContext.getDefaultLogLevel().toString());
     }
     
     public void setDefaultParallel(int p) {
@@ -321,7 +326,7 @@
             resourceLocation = urls.nextElement();
         }
         
-        if (pigContext.debug && urls.hasMoreElements()) {
+        if (urls.hasMoreElements()) {
             String logMessage = "Found multiple resources that match " 
                 + jarName + ": " + resourceLocation;
             
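
For an embedding program, the visible effect of the PigServer change is that debugOn() now switches the log4j level itself rather than flipping a boolean on PigContext, and debugOff() restores whatever default level Main recorded on PigContext. A small usage sketch (local mode and the printed check are illustrative only):

    import org.apache.log4j.Logger;
    import org.apache.pig.ExecType;
    import org.apache.pig.PigServer;

    public class DebugToggleSketch {
        public static void main(String[] args) throws Exception {
            PigServer pig = new PigServer(ExecType.LOCAL);

            pig.debugOn();  // sets the "org.apache.pig" logger to DEBUG
            System.out.println(Logger.getLogger("org.apache.pig").getLevel());  // DEBUG
        }
    }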

Modified: hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java?rev=800494&r1=800493&r2=800494&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java Mon Aug  3 18:59:13 2009
@@ -33,6 +33,7 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.log4j.PropertyConfigurator;
 
 import org.apache.pig.PigException;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -96,6 +97,10 @@
             sJobConf = jConf;
             try {
                 PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(jConf.get("udf.import.list")));
+                pigContext = (PigContext)ObjectSerializer.deserialize(jConf.get("pig.pigContext"));
+                if (pigContext.getLog4jProperties()!=null)
+                    PropertyConfigurator.configure(pigContext.getLog4jProperties());
+                
                 cp = (PhysicalPlan) ObjectSerializer.deserialize(jConf
                         .get("pig.combinePlan"));
                 pack = (POPackage)ObjectSerializer.deserialize(jConf.get("pig.combine.package"));
@@ -118,9 +123,6 @@
                     roots = cp.getRoots().toArray(new PhysicalOperator[1]);
                     leaf = cp.getLeaves().get(0);
                 }
-                
-                pigContext = (PigContext)ObjectSerializer.deserialize(jConf.get("pig.pigContext"));
-                
             } catch (IOException ioe) {
                 String msg = "Problem while configuring combiner's reduce plan.";
                 throw new RuntimeException(msg, ioe);

Modified: hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java?rev=800494&r1=800493&r2=800494&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java Mon Aug  3 18:59:13 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.log4j.PropertyConfigurator;
 import org.apache.pig.PigException;
 import org.apache.pig.backend.executionengine.ExecException;
 import org.apache.pig.data.TargetedTuple;
@@ -147,6 +148,10 @@
         PigMapReduce.sJobConf = job;
         try {
             PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(job.get("udf.import.list")));
+            pigContext = (PigContext)ObjectSerializer.deserialize(job.get("pig.pigContext"));
+            if (pigContext.getLog4jProperties()!=null)
+                PropertyConfigurator.configure(pigContext.getLog4jProperties());
+            
             mp = (PhysicalPlan) ObjectSerializer.deserialize(
                 job.get("pig.mapPlan"));
             stores = PlanHelper.getStores(mp);
@@ -176,7 +181,7 @@
                 leaf = mp.getLeaves().get(0);
             }
             
-            pigContext = (PigContext)ObjectSerializer.deserialize(job.get("pig.pigContext"));
+            
             
         } catch (IOException ioe) {
             String msg = "Problem while configuring map plan.";

Modified: hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java?rev=800494&r1=800493&r2=800494&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java Mon Aug  3 18:59:13 2009
@@ -33,6 +33,7 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.log4j.PropertyConfigurator;
 
 import org.apache.pig.PigException;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -276,6 +277,10 @@
             sJobConf = jConf;
             try {
                 PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(jConf.get("udf.import.list")));
+                pigContext = (PigContext)ObjectSerializer.deserialize(jConf.get("pig.pigContext"));
+                if (pigContext.getLog4jProperties()!=null)
+                    PropertyConfigurator.configure(pigContext.getLog4jProperties());
+                
                 rp = (PhysicalPlan) ObjectSerializer.deserialize(jConf
                         .get("pig.reducePlan"));
                 stores = PlanHelper.getStores(rp);
@@ -298,9 +303,6 @@
                     roots = rp.getRoots().toArray(new PhysicalOperator[1]);
                     leaf = rp.getLeaves().get(0);
                 }
-                
-                pigContext = (PigContext)ObjectSerializer.deserialize(jConf.get("pig.pigContext"));
-                
             } catch (IOException ioe) {
                 String msg = "Problem while configuring reduce plan.";
                 throw new RuntimeException(msg, ioe);
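
PigCombiner, PigMapBase and PigMapReduce all receive the same three added lines: deserialize the PigContext before anything else in configure(), then apply its log4j properties so the task-side loggers use the level chosen on the front end. A standalone sketch of that shared pattern (the wrapper class and method are illustrative, not Pig API):

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.log4j.PropertyConfigurator;
    import org.apache.pig.impl.PigContext;
    import org.apache.pig.impl.util.ObjectSerializer;

    public class BackendLoggingSetup {
        public static PigContext configureTaskLogging(JobConf jConf) throws IOException {
            // PigContext now comes first so its log4j properties can be applied
            // before the rest of the plan is deserialized and starts logging.
            PigContext pigContext =
                    (PigContext) ObjectSerializer.deserialize(jConf.get("pig.pigContext"));
            if (pigContext.getLog4jProperties() != null) {
                PropertyConfigurator.configure(pigContext.getLog4jProperties());
            }
            return pigContext;
        }
    }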

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/PigContext.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/PigContext.java?rev=800494&r1=800493&r2=800494&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/PigContext.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/PigContext.java Mon Aug  3 18:59:13 2009
@@ -41,6 +41,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.log4j.Level;
 import org.apache.pig.FuncSpec;
 import org.apache.pig.Main;
 import org.apache.pig.ExecType;
@@ -115,7 +116,9 @@
 
     private static ArrayList<String> packageImportList = new ArrayList<String>();
 
-    public boolean debug = true;
+    private Properties log4jProperties = new Properties();
+    
+    private Level defaultLogLevel; 
     
     public int defaultParallel = -1;
 
@@ -639,4 +642,20 @@
     {
         packageImportList = list;
     }
+    public void setLog4jProperties(Properties p)
+    {
+        log4jProperties = p;
+    }
+    public Properties getLog4jProperties()
+    {
+        return log4jProperties;
+    }
+    public Level getDefaultLogLevel()
+    {
+        return defaultLogLevel;
+    }
+    public void setDefaultLogLevel(Level l)
+    {
+        defaultLogLevel = l;
+    }
 }
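
The new PigContext fields tie the two sides together: the front end records the effective log4j configuration and default level, and anything holding the (serialized) context can read them back. A minimal sketch of the accessors in isolation (the driver code and local-mode constructor call are illustrative):

    import java.util.Properties;

    import org.apache.log4j.Level;
    import org.apache.pig.ExecType;
    import org.apache.pig.impl.PigContext;

    public class PigContextLoggingSketch {
        public static void main(String[] args) throws Exception {
            PigContext ctx = new PigContext(ExecType.LOCAL, new Properties());

            // Front end (Main.configureLog4J) records what was configured.
            Properties backendProps = ctx.getLog4jProperties();
            backendProps.setProperty("log4j.logger.org.apache.pig", Level.INFO.toString());
            ctx.setLog4jProperties(backendProps);
            ctx.setDefaultLogLevel(Level.INFO);

            // Backend tasks (via the serialized context) read it back.
            System.out.println(ctx.getDefaultLogLevel());  // INFO
            System.out.println(ctx.getLog4jProperties());
        }
    }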