You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by sm...@apache.org on 2009/07/17 03:02:15 UTC
svn commit: r794929 - in /hadoop/pig/trunk: ./ src/org/apache/pig/
src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/
src/org/apache/pig/impl/util/ src/org/apache/pig/tools/grunt/
Author: sms
Date: Fri Jul 17 01:02:14 2009
New Revision: 794929
URL: http://svn.apache.org/viewvc?rev=794929&view=rev
Log:
PIG-728: All backend error messages must be logged to preserve the original error messages
Modified:
hadoop/pig/trunk/CHANGES.txt
hadoop/pig/trunk/src/org/apache/pig/Main.java
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java
hadoop/pig/trunk/src/org/apache/pig/impl/util/LogUtils.java
hadoop/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java
hadoop/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java
Modified: hadoop/pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=794929&r1=794928&r2=794929&view=diff
==============================================================================
--- hadoop/pig/trunk/CHANGES.txt (original)
+++ hadoop/pig/trunk/CHANGES.txt Fri Jul 17 01:02:14 2009
@@ -40,6 +40,9 @@
BUG FIXES
+ PIG-728: All backend error messages must be logged to preserve the
+ original error messages (sms)
+
PIG-877: Push up filter does not account for added columns in foreach
(sms)
Modified: hadoop/pig/trunk/src/org/apache/pig/Main.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/Main.java?rev=794929&r1=794928&r2=794929&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/Main.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/Main.java Fri Jul 17 01:02:14 2009
@@ -406,12 +406,12 @@
}
if(!gruntCalled) {
- LogUtils.writeLog(pe, logFileName, log, verbose);
+ LogUtils.writeLog(pe, logFileName, log, verbose, "Error before Pig is launched");
}
} catch (Throwable e) {
rc = 2;
if(!gruntCalled) {
- LogUtils.writeLog(e, logFileName, log, verbose);
+ LogUtils.writeLog(e, logFileName, log, verbose, "Error before Pig is launched");
}
} finally {
// clear temp files
Modified: hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java?rev=794929&r1=794928&r2=794929&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/Launcher.java Fri Jul 17 01:02:14 2009
@@ -160,19 +160,21 @@
protected void getStats(Job job, JobClient jobClient, boolean errNotDbg, PigContext pigContext) throws Exception {
JobID MRJobID = job.getAssignedJobID();
String jobMessage = job.getMessage();
+ Exception backendException = null;
if(MRJobID == null) {
try {
- throw getExceptionFromString(jobMessage);
+ LogUtils.writeLog("Backend error message during job submission", jobMessage,
+ pigContext.getProperties().getProperty("pig.logfile"),
+ log);
+ backendException = getExceptionFromString(jobMessage);
} catch (Exception e) {
//just get the first line in the message and log the rest
String firstLine = getFirstLineFromMessage(jobMessage);
-
- LogUtils.writeLog(new Exception(jobMessage), pigContext.getProperties().getProperty("pig.logfile"),
- log, false, null, false, false);
int errCode = 2997;
String msg = "Unable to recreate exception from backend error: " + firstLine;
- throw new ExecException(msg, errCode, PigException.BUG, e);
+ throw new ExecException(msg, errCode, PigException.BUG);
}
+ throw backendException;
}
try {
TaskReport[] mapRep = jobClient.getMapTaskReports(MRJobID);
@@ -223,15 +225,16 @@
//errNotDbg is used only for failed jobs
//keep track of all the unique exceptions
try {
+ LogUtils.writeLog("Backend error message", msgs[j],
+ pigContext.getProperties().getProperty("pig.logfile"),
+ log);
Exception e = getExceptionFromString(msgs[j]);
exceptions.add(e);
} catch (Exception e1) {
String firstLine = getFirstLineFromMessage(msgs[j]);
- LogUtils.writeLog(new Exception(msgs[j]), pigContext.getProperties().getProperty("pig.logfile"),
- log, false, null, false, false);
int errCode = 2997;
String msg = "Unable to recreate exception from backend error: " + firstLine;
- throw new ExecException(msg, errCode, PigException.BUG, e1);
+ throw new ExecException(msg, errCode, PigException.BUG);
}
} else {
log.debug("Error message from task (" + type + ") " +
@@ -587,7 +590,7 @@
return new StackTraceElement(declaringClass, methodName, fileName, lineNumber);
}
- private String getFirstLineFromMessage(String message) {
+ protected String getFirstLineFromMessage(String message) {
String[] messages = message.split(newLine);
if(messages.length > 0) {
return messages[0];
Modified: hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java?rev=794929&r1=794928&r2=794929&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java Fri Jul 17 01:02:14 2009
@@ -57,6 +57,7 @@
import org.apache.pig.impl.plan.CompilationMessageCollector.Message;
import org.apache.pig.impl.plan.CompilationMessageCollector.MessageType;
import org.apache.pig.impl.util.ConfigurationValidator;
+import org.apache.pig.impl.util.LogUtils;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.io.FileSpec;
import org.apache.pig.tools.pigstats.PigStats;
@@ -70,6 +71,7 @@
//used to track the exception thrown by the job control which is run in a separate thread
private Exception jobControlException = null;
+ private String jobControlExceptionStackTrace = null;
private boolean aggregateWarning = false;
private Map<FileSpec, Exception> failureMap;
@@ -148,6 +150,11 @@
//no jobs to check for failure
if(jobControlException != null) {
if(jobControlException instanceof PigException) {
+ if(jobControlExceptionStackTrace != null) {
+ LogUtils.writeLog("Error message from job controller", jobControlExceptionStackTrace,
+ pc.getProperties().getProperty("pig.logfile"),
+ log);
+ }
throw jobControlException;
} else {
int errCode = 2117;
@@ -372,12 +379,13 @@
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
throwable.printStackTrace(ps);
- String exceptionString = baos.toString();
+ jobControlExceptionStackTrace = baos.toString();
try {
- jobControlException = getExceptionFromString(exceptionString);
+ jobControlException = getExceptionFromString(jobControlExceptionStackTrace);
} catch (Exception e) {
- String errMsg = "Could not resolve error that occured when launching map reduce job.";
- jobControlException = new RuntimeException(errMsg, e);
+ String errMsg = "Could not resolve error that occurred when launching map reduce job: "
+ + getFirstLineFromMessage(jobControlExceptionStackTrace);
+ jobControlException = new RuntimeException(errMsg);
}
}
}
Modified: hadoop/pig/trunk/src/org/apache/pig/impl/util/LogUtils.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/util/LogUtils.java?rev=794929&r1=794928&r2=794929&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/util/LogUtils.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/util/LogUtils.java Fri Jul 17 01:02:14 2009
@@ -68,14 +68,22 @@
}
- public static void writeLog(Throwable t, String logFileName, Log log, boolean verbose) {
- writeLog(t, logFileName, log, verbose, null, true, true);
+ public static void writeLog(Throwable t, String logFileName, Log log, boolean verbose, String headerMessage) {
+ writeLog(t, logFileName, log, verbose, headerMessage, true, true);
}
public static void writeLog(Throwable t, String logFileName, Log log, boolean verbose,
String headerMessage, boolean displayFooter, boolean displayMessage) {
String message = null;
+ String marker = null;
+ StringBuilder sb = new StringBuilder("=");
+
+ for(int i = 0; i < 79; ++i) {
+ sb.append("=");
+ }
+ sb.append("\n");
+ marker = sb.toString();
if(t instanceof Exception) {
Exception pe = LogUtils.getPermissionException((Exception)t);
@@ -113,7 +121,7 @@
if(logFileName == null) {
//if exec is invoked programmatically then logFileName will be null
- log.warn("There is no log file to write to");
+ log.warn("There is no log file to write to.");
log.error(bs.toString());
return;
}
@@ -122,9 +130,24 @@
File logFile = new File(logFileName);
try {
fos = new FileOutputStream(logFile, true);
- if(headerMessage != null) fos.write((headerMessage + "\n").getBytes("UTF-8"));
- fos.write((message + "\n").getBytes("UTF-8"));
- fos.write(bs.toString().getBytes("UTF-8"));
+ if(headerMessage != null) {
+ fos.write((headerMessage + "\n").getBytes("UTF-8"));
+ sb = new StringBuilder("-");
+ for(int i = 1; i < headerMessage.length(); ++i) {
+ sb.append("-");
+ }
+ sb.append("\n");
+ fos.write(sb.toString().getBytes("UTF-8"));
+ }
+ if(message != null) {
+ if(message.charAt(message.length() - 1) == '\n') {
+ fos.write((message + "\n").getBytes("UTF-8"));
+ } else {
+ fos.write((message + "\n\n").getBytes("UTF-8"));
+ }
+ }
+ fos.write(bs.toString().getBytes("UTF-8"));
+ fos.write(marker.getBytes("UTF-8"));
fos.close();
if(displayFooter) {
if(verbose) {
@@ -137,7 +160,43 @@
log.warn("Could not write to log file: " + logFileName + " :" + ioe.getMessage());
log.error(bs.toString());
}
- }
+ }
+
+ public static void writeLog(String headerMessage, String message, String logFileName, Log log) {
+ if(logFileName == null) {
+ //if exec is invoked programmatically then logFileName will be null
+ log.warn("There is no log file to write to.");
+ log.error(message);
+ return;
+ }
+
+
+ File logFile = new File(logFileName);
+ FileOutputStream fos = null;
+ try {
+ fos = new FileOutputStream(logFile, true);
+ if(headerMessage != null) {
+ fos.write((headerMessage + "\n").getBytes("UTF-8"));
+ StringBuilder sb = new StringBuilder("-");
+ for(int i = 1; i < headerMessage.length(); ++i) {
+ sb.append("-");
+ }
+ sb.append("\n");
+ fos.write(sb.toString().getBytes("UTF-8"));
+ }
+ if(message != null) {
+ if(message.charAt(message.length() - 1) == '\n') {
+ fos.write((message + "\n").getBytes("UTF-8"));
+ } else {
+ fos.write((message + "\n\n").getBytes("UTF-8"));
+ }
+ }
+ fos.close();
+ } catch (IOException ioe) {
+ log.warn("Could not write to log file: " + logFileName + " :" + ioe.getMessage());
+ log.error(message);
+ }
+ }
}
Modified: hadoop/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java?rev=794929&r1=794928&r2=794929&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/tools/grunt/Grunt.java Fri Jul 17 01:02:14 2009
@@ -75,7 +75,8 @@
parser.parseStopOnError();
break;
} catch(Throwable t) {
- LogUtils.writeLog(t, pig.getPigContext().getProperties().getProperty("pig.logfile"), log, verbose);
+ LogUtils.writeLog(t, pig.getPigContext().getProperties().getProperty("pig.logfile"),
+ log, verbose, "Pig Stack Trace");
parser.ReInit(in);
}
}
@@ -87,7 +88,8 @@
parser.setInteractive(false);
return parser.parseStopOnError();
} catch (Throwable t) {
- LogUtils.writeLog(t, pig.getPigContext().getProperties().getProperty("pig.logfile"), log, verbose);
+ LogUtils.writeLog(t, pig.getPigContext().getProperties().getProperty("pig.logfile"),
+ log, verbose, "Pig Stack Trace");
throw (t);
}
}
Modified: hadoop/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java?rev=794929&r1=794928&r2=794929&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/tools/grunt/GruntParser.java Fri Jul 17 01:02:14 2009
@@ -118,7 +118,8 @@
job.getException(),
mPigServer.getPigContext().getProperties().getProperty("pig.logfile"),
log,
- "true".equalsIgnoreCase(mPigServer.getPigContext().getProperties().getProperty("verbose")));
+ "true".equalsIgnoreCase(mPigServer.getPigContext().getProperties().getProperty("verbose")),
+ "Pig Stack Trace");
}
}
else {
@@ -542,7 +543,7 @@
else
{
job.killJob();
- log.error("kill submitted.");
+ log.info("Kill " + id + " submitted.");
}
}
}