You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by ol...@apache.org on 2009/01/24 00:07:32 UTC

svn commit: r737239 [4/6] - in /hadoop/pig/trunk: ./ src/org/apache/pig/ src/org/apache/pig/backend/ src/org/apache/pig/backend/datastorage/ src/org/apache/pig/backend/executionengine/ src/org/apache/pig/backend/executionengine/util/ src/org/apache/pig...

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/FrontendException.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/FrontendException.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/FrontendException.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/FrontendException.java Fri Jan 23 23:07:30 2009
@@ -17,21 +17,141 @@
  */
 package org.apache.pig.impl.logicalLayer;
 
-public class FrontendException extends Exception {
+import org.apache.pig.PigException;
 
-    public FrontendException (String message, Throwable cause) {
-        super(message, cause);
-    }
+public class FrontendException extends PigException {
 
+    /**
+     * Create a new FrontendException with null as the error message.
+     */
     public FrontendException() {
-        this(null, null);
+        super();
     }
     
+    /**
+     * Create a new FrontendException with the specified message.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     */
     public FrontendException(String message) {
-        this(message, null);
+        super(message);
     }
     
+    /**
+     * Create a new FrontendException with the specified cause.
+     *
+     * @param cause - The cause (which is saved for later retrieval by the <link>Throwable.getCause()</link> method) indicating the source of this exception. A null value is permitted, and indicates that the cause is nonexistent or unknown.
+     */
     public FrontendException(Throwable cause) {
-        this(null, cause);
+        super(cause);
+    }
+
+    /**
+     * Create a new FrontendException with the specified message and cause.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param cause - The cause (which is saved for later retrieval by the <link>Throwable.getCause()</link> method) indicating the source of this exception. A null value is permitted, and indicates that the cause is nonexistent or unknown.
+     */
+    public FrontendException(String message, Throwable cause) {
+        super(message, cause);
     }
-}
+
+    /**
+     * Create a new FrontendException with the specified message and error code.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param errCode - The error code shown to the user 
+     */
+    public FrontendException(String message, int errCode) {
+        super(message, errCode);
+    }
+
+    /**
+     * Create a new FrontendException with the specified message, error code and cause.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param errCode - The error code shown to the user 
+     * @param cause - The cause (which is saved for later retrieval by the <link>Throwable.getCause()</link> method) indicating the source of this exception. A null value is permitted, and indicates that the cause is nonexistent or unknown. 
+     */
+    public FrontendException(String message, int errCode, Throwable cause) {
+        super(message, errCode, cause);
+    }
+
+    /**
+     * Create a new FrontendException with the specified message, error code and error source.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param errCode - The error code shown to the user 
+     * @param errSrc - The error source 
+     */
+    public FrontendException(String message, int errCode, byte errSrc) {
+        super(message, errCode, errSrc);
+    }   
+
+    /**
+     * Create a new FrontendException with the specified message, error code, error source and cause.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param errCode - The error code shown to the user 
+     * @param errSrc - The error source
+     * @param cause - The cause (which is saved for later retrieval by the <link>Throwable.getCause()</link> method) indicating the source of this exception. A null value is permitted, and indicates that the cause is nonexistent or unknown. 
+     */
+    public FrontendException(String message, int errCode, byte errSrc,
+            Throwable cause) {
+        super(message, errCode, errSrc, cause);
+    }
+
+    /**
+     * Create a new FrontendException with the specified message, error code and retry flag.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param errCode - The error code shown to the user 
+     * @param retry - If the exception is retriable or not
+     */ 
+    public FrontendException(String message, int errCode, boolean retry) {
+        super(message, errCode, retry);
+    }
+
+    /**
+     * Create a new FrontendException with the specified message, error code, error source and retry flag.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param errCode - The error code shown to the user 
+     * @param errSrc - The error source 
+     * @param retry - If the exception is retriable or not
+     */
+    public FrontendException(String message, int errCode, byte errSrc,
+            boolean retry) {
+        super(message, errCode, errSrc, retry);
+    }    
+
+	/**
+     * Create a new FrontendException with the specified message, error code, error source, retriable or not and detailed message for the developer.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param errCode - The error code shown to the user 
+     * @param errSrc - The error source 
+     * @param retry - If the exception is retriable or not
+     * @param detailedMsg - The detailed message shown to the developer 
+     */
+	public FrontendException(String message, int errCode, byte errSrc,
+			boolean retry, String detailedMsg) {
+		super(message, errCode, errSrc, retry, detailedMsg);
+	}
+    
+    /**
+     * Create a new FrontendException with the specified message, error code, error source, retriable or not, detailed message for the developer and cause.
+     *
+     * @param message - The error message (which is saved for later retrieval by the <link>Throwable.getMessage()</link> method) shown to the user 
+     * @param errCode - The error code shown to the user 
+     * @param errSrc - The error source 
+     * @param retry - If the exception is retriable or not
+     * @param detailedMsg - The detailed message shown to the developer 
+     * @param cause - The cause (which is saved for later retrieval by the <link>Throwable.getCause()</link> method) indicating the source of this exception. A null value is permitted, and indicates that the cause is nonexistent or unknown.
+     */
+    public FrontendException(String message, int errCode, byte errSrc,
+            boolean retry, String detailedMsg, Throwable cause) {
+        super(message, errCode, errSrc, retry, detailedMsg, cause);
+    }
+
+}
\ No newline at end of file

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOBinCond.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOBinCond.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOBinCond.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOBinCond.java Fri Jan 23 23:07:30 2009
@@ -19,7 +19,6 @@
 package org.apache.pig.impl.logicalLayer;
 
 
-import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.PlanVisitor;

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOCogroup.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOCogroup.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOCogroup.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOCogroup.java Fri Jan 23 23:07:30 2009
@@ -26,10 +26,10 @@
 import java.util.HashMap;
 import java.util.Iterator;
 
+import org.apache.pig.PigException;
 import org.apache.pig.data.DataType;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.VisitorException;
-import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.parser.ParseException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.optimizer.SchemaRemover;
@@ -279,19 +279,15 @@
                 if(mergedGroupSchema.size() != groupBySchema.size()) {
                     mSchema = null;
                     mIsSchemaComputed = false;
-                    throw new FrontendException("Internal error. Mismatch in group by arities. Expected: " + mergedGroupSchema + ". Found: " + groupBySchema);
+                    int errCode = 2000;
+                    String msg = "Internal error. Mismatch in group by arities. Expected: " + mergedGroupSchema + ". Found: " + groupBySchema;
+                    throw new FrontendException(msg, errCode, PigException.BUG, false, null);
                 } else {
                     for(int i = 0; i < mergedGroupSchema.size(); ++i) {
-                        try {
-                            Schema.FieldSchema mergedFs = mergedGroupSchema.getField(i);
-                            Schema.FieldSchema groupFs = groupBySchema.getField(i);
-                            mergedFs.alias = groupFs.alias;
-                            mergedGroupSchema.addAlias(mergedFs.alias, mergedFs);
-                        } catch (ParseException pe) {
-                            mSchema = null;
-                            mIsSchemaComputed = false;
-                            throw new FrontendException(pe.getMessage());
-                        }
+                        Schema.FieldSchema mergedFs = mergedGroupSchema.getField(i);
+                        Schema.FieldSchema groupFs = groupBySchema.getField(i);
+                        mergedFs.alias = groupFs.alias;
+                        mergedGroupSchema.addAlias(mergedFs.alias, mergedFs);
                     }
                 }
                 
@@ -371,8 +367,10 @@
      */
     public byte getAtomicGroupByType() throws FrontendException {
         if (isTupleGroupCol()) {
-            throw new FrontendException("getAtomicGroupByType is used only when"
-                                     + " dealing with atomic group col") ;
+            int errCode = 1010;
+            String msg = "getAtomicGroupByType is used only when"
+                + " dealing with atomic group col";
+            throw new FrontendException(msg, errCode, PigException.INPUT, false, null) ;
         }
 
         byte groupType = DataType.BYTEARRAY ;
@@ -383,8 +381,10 @@
             List<LogicalPlan> innerPlans
                         = new ArrayList<LogicalPlan>(getGroupByPlans().get(input)) ;
             if (innerPlans.size() != 1) {
-                throw new FrontendException("Each COGroup input has to have "
-                                         + "the same number of inner plans") ;
+                int errCode = 1012;
+                String msg = "Each COGroup input has to have "
+                + "the same number of inner plans";
+                throw new FrontendException(msg, errCode, PigException.INPUT, false, null) ;
             }
             byte innerType = innerPlans.get(0).getSingleLeafPlanOutputType() ;
             groupType = DataType.mergeType(groupType, innerType) ;
@@ -400,8 +400,10 @@
      */
     public Schema getTupleGroupBySchema() throws FrontendException {
         if (!isTupleGroupCol()) {
-            throw new FrontendException("getTupleGroupBySchema is used only when"
-                                     + " dealing with tuple group col") ;
+            int errCode = 1011;
+            String msg = "getTupleGroupBySchema is used only when"
+                + " dealing with tuple group col";
+            throw new FrontendException(msg, errCode, PigException.INPUT, false, null) ;
         }
 
         // this fsList represents all the columns in group tuple
@@ -444,8 +446,9 @@
             }
 
             if(seenProjectStar) {
-                throw new FrontendException("Grouping attributes can either be star (*) or a list of expressions, but not both.");
-                
+                int errCode = 1013;
+                String msg = "Grouping attributes can either be star (*) or a list of expressions, but not both.";
+                throw new FrontendException(msg, errCode, PigException.INPUT, false, null);                
             }
 
         }

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOConst.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOConst.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOConst.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOConst.java Fri Jan 23 23:07:30 2009
@@ -18,6 +18,7 @@
 
 package org.apache.pig.impl.logicalLayer;
 
+import org.apache.pig.PigException;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.PlanVisitor;
 import org.apache.pig.impl.plan.VisitorException;
@@ -68,9 +69,9 @@
             } catch (Exception e) {
                 mFieldSchema = null;
                 mIsFieldSchemaComputed = false;
-                System.err.println("LOConst: " + e.getMessage());
-                e.printStackTrace();
-                throw new FrontendException(e.getMessage());
+                int errCode = 1015;
+                String msg = "Error determining fieldschema of constant: " + this;
+                throw new FrontendException(msg, errCode, PigException.INPUT, false, null, e);
             }
         }
         return mFieldSchema;

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOCross.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOCross.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOCross.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOCross.java Fri Jan 23 23:07:30 2009
@@ -27,6 +27,7 @@
 import java.util.Iterator;
 import java.io.IOException;
 
+import org.apache.pig.PigException;
 import org.apache.pig.data.DataType;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
@@ -135,7 +136,7 @@
                 }
             }
             if(duplicates) {
-                String errMessage = "Found duplicates in schema ";
+                String errMessage = "Found duplicates in schema. ";
                 if(duplicateAliases.size() > 0) {
                     Iterator<String> iter = duplicateAliases.iterator();
                     errMessage += ": " + iter.next();
@@ -143,7 +144,9 @@
                         errMessage += ", " + iter.next();
                     }
                 }
-                throw new FrontendException(errMessage);
+                errMessage += ". Please alias the columns with unique names.";
+                int errCode = 1007;
+                throw new FrontendException(errMessage, errCode, PigException.INPUT, false, null);
             }
             mSchema = new Schema(fss);
             //add the aliases that are unique after flattening

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LODistinct.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LODistinct.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LODistinct.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LODistinct.java Fri Jan 23 23:07:30 2009
@@ -22,7 +22,7 @@
 import java.util.ArrayList;
 import java.io.IOException;
 
-import org.apache.pig.impl.logicalLayer.FrontendException;
+import org.apache.pig.PigException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.PlanVisitor;
@@ -61,7 +61,9 @@
             try {
                 LogicalOperator op = s.iterator().next();
                 if (null == op) {
-                    throw new FrontendException("Could not find operator in plan");
+                    int errCode = 1006;
+                    String msg = "Could not find operator in plan";
+                    throw new FrontendException(msg, errCode, PigException.BUG, false, null);
                 }
                 if(op instanceof ExpressionOperator) {
                     Schema.FieldSchema fs = new Schema.FieldSchema(((ExpressionOperator)op).getFieldSchema());

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOFilter.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOFilter.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOFilter.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOFilter.java Fri Jan 23 23:07:30 2009
@@ -19,7 +19,7 @@
 
 import java.util.List;
 import java.util.ArrayList;
-import org.apache.pig.impl.logicalLayer.FrontendException;
+
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.PlanVisitor;

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOForEach.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOForEach.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOForEach.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOForEach.java Fri Jan 23 23:07:30 2009
@@ -23,7 +23,8 @@
 import java.util.HashMap;
 import java.util.Set;
 import java.util.Iterator;
-import org.apache.pig.impl.logicalLayer.FrontendException;
+
+import org.apache.pig.PigException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.SchemaMergeException;
 import org.apache.pig.impl.logicalLayer.optimizer.SchemaRemover;
@@ -211,16 +212,20 @@
 					            // check that indeed we only have one field schema
 					            // which is that of a tuple
 					            if(s.getFields().size() != 1) {
-					                throw new FrontendException("Expected a bag schema with a single " +
-					                        "element of type "+ DataType.findTypeName(DataType.TUPLE) +
-					                        " but got a bag schema with multiple elements.");
+					                int errCode = 1008;
+					                String msg = "Expected a bag schema with a single " +
+                                    "element of type "+ DataType.findTypeName(DataType.TUPLE) +
+                                    " but got a bag schema with multiple elements.";
+					                throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
 					            }
 					            Schema.FieldSchema tupleFS = s.getField(0);
 					            if(tupleFS.type != DataType.TUPLE) {
-					                throw new FrontendException("Expected a bag schema with a single " +
-					                        "element of type "+ DataType.findTypeName(DataType.TUPLE) +
-					                        " but got an element of type " +
-					                        DataType.findTypeName(tupleFS.type));
+					                int errCode = 1009;
+					                String msg = "Expected a bag schema with a single " +
+                                    "element of type "+ DataType.findTypeName(DataType.TUPLE) +
+                                    " but got an element of type " +
+                                    DataType.findTypeName(tupleFS.type);
+					                throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
 					            }
 					            s = tupleFS.schema;
 							    
@@ -228,12 +233,8 @@
 							if(null != s) {
 								for(int i = 0; i < s.size(); ++i) {
                                     Schema.FieldSchema fs;
-                                    try {
-                                        fs = new Schema.FieldSchema(s.getField(i));
-                                        fs.setParent(s.getField(i).canonicalName, op);
-                                    } catch (ParseException pe) {
-                                        throw new FrontendException(pe.getMessage());
-                                    }
+                                    fs = new Schema.FieldSchema(s.getField(i));
+                                    fs.setParent(s.getField(i).canonicalName, op);
 									log.debug("fs: " + fs);
                                     if(null != userDefinedSchema) {
                                         Schema.FieldSchema userDefinedFieldSchema;
@@ -242,10 +243,10 @@
                                                 userDefinedFieldSchema = userDefinedSchema.getField(i);
                                                 fs = fs.mergePrefixFieldSchema(userDefinedFieldSchema);
                                             }
-                                        } catch (ParseException pe) {
-                                            throw new FrontendException(pe.getMessage());
                                         } catch (SchemaMergeException sme) {
-                                            throw new FrontendException(sme.getMessage());
+                                            int errCode = 1016;
+                                            String msg = "Problems in merging user defined schema";
+                                            throw new FrontendException(msg, errCode, PigException.INPUT, false, null, sme);
                                         }
                                         outerCanonicalAlias = null;
                                     }
@@ -254,11 +255,7 @@
 									if((null != outerCanonicalAlias) && (null != innerCanonicalAlias)) {
 										String disambiguatorAlias = outerCanonicalAlias + "::" + innerCanonicalAlias;
 										newFs = new Schema.FieldSchema(disambiguatorAlias, fs.schema, fs.type);
-                                        try {
-                                            newFs.setParent(s.getField(i).canonicalName, op);
-										} catch (ParseException pe) {
-                                            throw new FrontendException(pe.getMessage());
-                                        }
+                                        newFs.setParent(s.getField(i).canonicalName, op);
                                         fss.add(newFs);
                                         updateAliasCount(aliases, disambiguatorAlias);
 										//it's fine if there are duplicates
@@ -266,11 +263,7 @@
 										//flattening
 									} else {
 										newFs = new Schema.FieldSchema(fs);
-                                        try {
-                                            newFs.setParent(s.getField(i).canonicalName, op);
-										} catch (ParseException pe) {
-                                            throw new FrontendException(pe.getMessage());
-                                        }
+                                        newFs.setParent(s.getField(i).canonicalName, op);
 										fss.add(newFs);
 									}
                                     updateAliasCount(aliases, innerCanonicalAlias);
@@ -282,15 +275,17 @@
                                 if(null != userDefinedSchema) {
                                     if(!DataType.isSchemaType(planFs.type)) {
                                         if(userDefinedSchema.size() > 1) {
-                                            throw new FrontendException("Schema mismatch. A basic type on flattening cannot have more than one column. User defined schema: " + userDefinedSchema);
+                                            int errCode = 1017;
+                                            String msg = "Schema mismatch. A basic type on flattening cannot have more than one column. User defined schema: " + userDefinedSchema;
+                                            throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
                                         }
 								        newFs = new Schema.FieldSchema(null, planFs.type);
                                         try {
                                             newFs = newFs.mergePrefixFieldSchema(userDefinedSchema.getField(0));
                                         } catch (SchemaMergeException sme) {
-                                            throw new FrontendException(sme.getMessage());
-                                        } catch (ParseException pe) {
-                                            throw new FrontendException(pe.getMessage());
+                                            int errCode = 1016;
+                                            String msg = "Problems in merging user defined schema";
+                                            throw new FrontendException(msg, errCode, PigException.INPUT, false, null, sme);
                                         }
                                         updateAliasCount(aliases, newFs.alias);
                                         fss.add(newFs);
@@ -324,9 +319,9 @@
                                     newFs = newFs.mergePrefixFieldSchema(userDefinedSchema.getField(0));
                                     updateAliasCount(aliases, newFs.alias);
                                 } catch (SchemaMergeException sme) {
-                                    throw new FrontendException(sme.getMessage());
-                                } catch (ParseException pe) {
-                                    throw new FrontendException(pe.getMessage());
+                                    int errCode = 1016;
+                                    String msg = "Problems in merging user defined schema";
+                                    throw new FrontendException(msg, errCode, PigException.INPUT, false, null, sme);
                                 }
                             }
                             newFs.setParent(planFs.canonicalName, op);
@@ -336,14 +331,10 @@
 						//did not get a valid list of field schemas
                         String outerCanonicalAlias = null;
                         if(null != userDefinedSchema) {
-                            try {
-                                Schema.FieldSchema userDefinedFieldSchema = new Schema.FieldSchema(userDefinedSchema.getField(0));
-                                fss.add(userDefinedFieldSchema);
-                                userDefinedFieldSchema.setParent(null, op);
-                                updateAliasCount(aliases, userDefinedFieldSchema.alias);
-                            } catch (ParseException pe) {
-                                throw new FrontendException(pe.getMessage());
-                            }
+                            Schema.FieldSchema userDefinedFieldSchema = new Schema.FieldSchema(userDefinedSchema.getField(0));
+                            fss.add(userDefinedFieldSchema);
+                            userDefinedFieldSchema.setParent(null, op);
+                            updateAliasCount(aliases, userDefinedFieldSchema.alias);
                         } else {
                             mSchema = null;
                             mIsSchemaComputed = true;
@@ -354,8 +345,6 @@
                     mSchema = null;
                     mIsSchemaComputed = false;
                     throw fee;
-                } catch (ParseException e) {
-                    throw new FrontendException(e);
                 }
             }
 			//check for duplicate column names and throw an error if there are duplicates
@@ -381,7 +370,7 @@
 				}
 			}
 			if(duplicates) {
-				String errMessage = "Found duplicates in schema! ";
+				String errMessage = "Found duplicates in schema. ";
 				if(duplicateAliases.size() > 0) {
 					Set<String> duplicateCols = duplicateAliases.keySet();
 					Iterator<String> iter = duplicateCols.iterator();
@@ -394,7 +383,8 @@
 				}
 				errMessage += ". Please alias the columns with unique names.";
 				log.debug(errMessage);
-				throw new FrontendException(errMessage);
+				int errCode = 1007;
+				throw new FrontendException(errMessage, errCode, PigException.INPUT, false, null);
 			}
             mSchema = new Schema(fss);
 			//add the aliases that are unique after flattening

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOGenerate.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOGenerate.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOGenerate.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOGenerate.java Fri Jan 23 23:07:30 2009
@@ -25,8 +25,9 @@
 import java.util.Map;
 import java.util.HashMap;
 import java.util.Collection;
+
+import org.apache.pig.PigException;
 import org.apache.pig.data.DataType;
-import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.PlanVisitor;
@@ -128,7 +129,9 @@
             try {
                 LogicalOperator op = s.iterator().next();
                 if (null == op) {
-                    throw new FrontendException("Could not find operator in plan");
+                    int errCode = 1006;
+                    String msg = "Could not find operator in plan";
+                    throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
                 }
                 if(op instanceof ExpressionOperator) {
                     fss.add(new Schema.FieldSchema(((ExpressionOperator)op).getFieldSchema()));

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOLoad.java Fri Jan 23 23:07:30 2009
@@ -22,6 +22,7 @@
 
 import org.apache.pig.ExecType;
 import org.apache.pig.LoadFunc;
+import org.apache.pig.PigException;
 import org.apache.pig.backend.datastorage.DataStorage;
 import org.apache.pig.data.DataType;
 import org.apache.pig.impl.PigContext;
@@ -138,9 +139,9 @@
                 }
                 mIsSchemaComputed = true;
             } catch (IOException ioe) {
-                ioe.printStackTrace();
-                FrontendException fee = new FrontendException(ioe.getMessage());
-                fee.initCause(ioe);
+                int errCode = 1018;
+                String msg = "Problem determining schema during load";
+                FrontendException fee = new FrontendException(msg, errCode, PigException.INPUT, false, null, ioe);
                 mIsSchemaComputed = false;
                 mSchema = null;
                 throw fee;
@@ -153,7 +154,7 @@
      * @see org.apache.pig.impl.logicalLayer.LogicalOperator#setSchema(org.apache.pig.impl.logicalLayer.schema.Schema)
      */
     @Override
-    public void setSchema(Schema schema) throws ParseException {
+    public void setSchema(Schema schema) throws FrontendException {
         // In general, operators don't generate their schema until they're
         // asked, so ask them to do it.
         try {
@@ -170,9 +171,9 @@
             try {
                 mSchema = mSchema.mergePrefixSchema(schema, true, true);
             } catch (SchemaMergeException e) {
-                ParseException pe = new ParseException("Unable to merge schemas");
-                pe.initCause(e);
-                throw pe;
+                int errCode = 1019;
+                String msg = "Unable to merge schemas";
+                throw new FrontendException(msg, errCode, PigException.INPUT, false, null, e);
             }
         }
     }

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOPrinter.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOPrinter.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOPrinter.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOPrinter.java Fri Jan 23 23:07:30 2009
@@ -28,7 +28,6 @@
 import java.util.Collections;
 
 import org.apache.pig.data.DataType;
-import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.DepthFirstWalker;
 import org.apache.pig.impl.plan.DependencyOrderWalker;

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOProject.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOProject.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOProject.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOProject.java Fri Jan 23 23:07:30 2009
@@ -24,6 +24,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.pig.PigException;
 import org.apache.pig.data.DataType;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.PlanVisitor;
@@ -222,7 +223,7 @@
             } else {
                 //its n list of columns to project including a single column
                 List<Schema.FieldSchema> fss = new ArrayList<Schema.FieldSchema>(mProjection.size());
-                try {
+                //try {
                     if (null != expressionOperator) {
                         log.debug("expressionOperator is not null");
                         if(mProjection.size() == 1) {
@@ -248,16 +249,20 @@
                                             // check that indeed we only have one field schema
                                             // which is that of a tuple
                                             if(s.getFields().size() != 1) {
-                                                throw new FrontendException("Expected a bag schema with a single " +
-                                                        "element of type "+ DataType.findTypeName(DataType.TUPLE) +
-                                                        " but got a bag schema with multiple elements.");
+                                                int errCode = 1008;
+                                                String msg = "Expected a bag schema with a single " +
+                                                "element of type "+ DataType.findTypeName(DataType.TUPLE) +
+                                                " but got a bag schema with multiple elements.";
+                                                throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
                                             }
                                             Schema.FieldSchema tupleFS = s.getField(0);
                                             if(tupleFS.type != DataType.TUPLE) {
-                                                throw new FrontendException("Expected a bag schema with a single " +
-                                                        "element of type "+ DataType.findTypeName(DataType.TUPLE) +
-                                                        " but got an element of type " +
-                                                        DataType.findTypeName(tupleFS.type));
+                                                int errCode = 1009;
+                                                String msg = "Expected a bag schema with a single " +
+                                                "element of type "+ DataType.findTypeName(DataType.TUPLE) +
+                                                " but got an element of type " +
+                                                DataType.findTypeName(tupleFS.type);
+                                                throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
                                             }
                                             fs = tupleFS.schema.getField(mProjection.get(0));
                                         } else {
@@ -342,11 +347,11 @@
                         log.warn("The input for a projection operator cannot be null");
                         //fss.add(new Schema.FieldSchema(null, DataType.BYTEARRAY));
                     }
-                } catch(ParseException pe) {
-                    mFieldSchema = null;
-                    mIsFieldSchemaComputed = false;
-                    throw new FrontendException(pe.getMessage());
-                }
+                //} catch(ParseException pe) {
+                //    mFieldSchema = null;
+                //    mIsFieldSchemaComputed = false;
+                //    throw new FrontendException(pe.getMessage());
+                //}
                 mFieldSchema = new Schema.FieldSchema(expressionOperator.getAlias(), new Schema(fss));
                 mFieldSchema.setParent(null, expressionOperator);
                 mIsFieldSchemaComputed = true;

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSort.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSort.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSort.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSort.java Fri Jan 23 23:07:30 2009
@@ -24,7 +24,7 @@
 import java.util.Iterator;
 
 import org.apache.pig.FuncSpec;
-import org.apache.pig.impl.logicalLayer.FrontendException;
+import org.apache.pig.PigException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.VisitorException;
@@ -135,7 +135,9 @@
             try {
                 LogicalOperator op = s.iterator().next();
                 if (null == op) {
-                    throw new FrontendException("Could not find operator in plan");
+                    int errCode = 1006;
+                    String msg = "Could not find operator in plan";                    
+                    throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
                 }
                 if(op instanceof ExpressionOperator) {
                     Schema.FieldSchema fs = new Schema.FieldSchema(((ExpressionOperator)op).getFieldSchema());

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSplit.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSplit.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSplit.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSplit.java Fri Jan 23 23:07:30 2009
@@ -23,7 +23,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.pig.impl.logicalLayer.FrontendException;
+import org.apache.pig.PigException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.PlanVisitor;
@@ -77,7 +77,9 @@
             try {
                 LogicalOperator op = s.iterator().next();
                 if (null == op) {
-                    throw new FrontendException("Could not find operator in plan");
+                    int errCode = 1006;
+                    String msg = "Could not find operator in plan";
+                    throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
                 }
                 mSchema = s.iterator().next().getSchema();
                 mIsSchemaComputed = true;

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSplitOutput.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSplitOutput.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSplitOutput.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOSplitOutput.java Fri Jan 23 23:07:30 2009
@@ -23,6 +23,7 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.pig.PigException;
 import org.apache.pig.impl.logicalLayer.LogicalOperator;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
@@ -73,7 +74,9 @@
             try {
                 LogicalOperator input = mPlan.getPredecessors(this).get(0);
                 if (null == input) {
-                    throw new FrontendException("Could not find operator in plan");
+                    int errCode = 1006;
+                    String msg = "Could not find operator in plan";
+                    throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
                 }
                 mSchema = input.getSchema();
                 mIsSchemaComputed = true;

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOUnion.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOUnion.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOUnion.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOUnion.java Fri Jan 23 23:07:30 2009
@@ -21,6 +21,8 @@
 import java.util.List;
 import java.util.Collection;
 import java.util.Iterator;
+
+import org.apache.pig.PigException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.PlanVisitor;
@@ -57,7 +59,9 @@
                 Iterator<LogicalOperator> iter = s.iterator();
                 LogicalOperator op = iter.next();
                 if (null == op) {
-                    throw new FrontendException("Could not find operator in plan");
+                    int errCode = 1006;
+                    String msg = "Could not find operator in plan";
+                    throw new FrontendException(msg, errCode, PigException.INPUT, false, null);
                 }
                 mSchema = op.getSchema();
                 while(iter.hasNext()) {

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOUserFunc.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOUserFunc.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOUserFunc.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LOUserFunc.java Fri Jan 23 23:07:30 2009
@@ -22,6 +22,7 @@
 
 import org.apache.pig.EvalFunc;
 import org.apache.pig.FuncSpec;
+import org.apache.pig.PigException;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.impl.plan.VisitorException;
 import org.apache.pig.impl.logicalLayer.parser.ParseException;
@@ -89,10 +90,11 @@
             Schema inputSchema = new Schema();
             for(ExpressionOperator op: mArgs) {
                 if (!DataType.isUsableType(op.getType())) {
-                    String msg = "Problem with input: " + op + " of User-defined function: " + this ;
                     mFieldSchema = null;
                     mIsFieldSchemaComputed = false;
-                    throw new FrontendException(msg) ;
+                    int errCode = 1014;
+                    String msg = "Problem with input: " + op + " of User-defined function: " + this ;
+                    throw new FrontendException(msg, errCode, PigException.INPUT, false, null) ;
                 }
                 inputSchema.add(op.getFieldSchema());    
             }
@@ -103,7 +105,7 @@
 
             if (null != udfSchema) {
                 Schema.FieldSchema fs;
-                try {
+//                try {
                     if(udfSchema.size() == 0) {
                         fs = new Schema.FieldSchema(null, null, returnType);
                     } else if(udfSchema.size() == 1) {
@@ -111,9 +113,9 @@
                     } else {
                         fs = new Schema.FieldSchema(null, udfSchema, DataType.TUPLE);
                     }
-                } catch (ParseException pe) {
-                    throw new FrontendException(pe.getMessage());
-                }
+//                } catch (ParseException pe) {
+//                    throw new FrontendException(pe.getMessage());
+//                }
                 setType(fs.type);
                 mFieldSchema = fs;
                 mIsFieldSchemaComputed = true;

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LogicalOperator.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LogicalOperator.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LogicalOperator.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/LogicalOperator.java Fri Jan 23 23:07:30 2009
@@ -23,7 +23,6 @@
 import java.io.IOException;
 
 import org.apache.pig.data.DataType;
-import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.parser.ParseException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.Operator;
@@ -120,7 +119,7 @@
      *             if there is already a schema and the existing schema cannot
      *             be reconciled with this new schema.
      */
-    public void setSchema(Schema schema) throws ParseException {
+    public void setSchema(Schema schema) throws FrontendException {
         // In general, operators don't generate their schema until they're
         // asked, so ask them to do it.
         try {

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/ProjectStarTranslator.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/ProjectStarTranslator.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/ProjectStarTranslator.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/ProjectStarTranslator.java Fri Jan 23 23:07:30 2009
@@ -140,8 +140,8 @@
                             } else {
                                 newUserDefinedSchemaList.add(null);
                             }
-                        } catch (ParseException pe) {
-                            throw new VisitorException(pe.getMessage(), pe);
+                        } catch (FrontendException fee) {
+                            throw new VisitorException(fee.getMessage(), fee);
                         }
                     } else {
                         newUserDefinedSchemaList.add(null);

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/ImplicitSplitInserter.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/ImplicitSplitInserter.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/ImplicitSplitInserter.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/ImplicitSplitInserter.java Fri Jan 23 23:07:30 2009
@@ -21,9 +21,9 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.pig.PigException;
 import org.apache.pig.data.DataType;
 import org.apache.pig.impl.logicalLayer.LOConst;
-import org.apache.pig.impl.logicalLayer.LOPrinter;
 import org.apache.pig.impl.logicalLayer.LOSplitOutput;
 import org.apache.pig.impl.logicalLayer.LogicalOperator;
 import org.apache.pig.impl.logicalLayer.LogicalPlan;
@@ -31,7 +31,6 @@
 import org.apache.pig.impl.plan.DepthFirstWalker;
 import org.apache.pig.impl.plan.NodeIdGenerator;
 import org.apache.pig.impl.plan.OperatorKey;
-import org.apache.pig.impl.plan.VisitorException;
 import org.apache.pig.impl.plan.optimizer.OptimizerException;
 
 public class ImplicitSplitInserter extends LogicalTransformer {
@@ -44,24 +43,40 @@
     public boolean check(List<LogicalOperator> nodes) throws OptimizerException {
         // Look to see if this is a non-split node with two outputs.  If so
         // it matches.
-        LogicalOperator op = nodes.get(0);
-        List<LogicalOperator> succs = mPlan.getSuccessors(op);
-        if (succs == null || succs.size() < 2) return false;
-        if (op instanceof LOSplit) return false;
-        return true;
+        if((nodes == null) || (nodes.size() <= 0)) {
+            int errCode = 2052;
+            String msg = "Internal error. Cannot retrieve operator from null or empty list.";
+            throw new OptimizerException(msg, errCode, PigException.BUG);
+        }
+        try {
+            LogicalOperator op = nodes.get(0);
+            List<LogicalOperator> succs = mPlan.getSuccessors(op);
+            if (succs == null || succs.size() < 2) return false;
+            if (op instanceof LOSplit) return false;
+            return true;
+        } catch (Exception e) {
+            int errCode = 2048;
+            String msg = "Error while performing checks to introduce split operators.";
+            throw new OptimizerException(msg, errCode, PigException.BUG, e);
+        }
     }
 
     @Override
     public void transform(List<LogicalOperator> nodes)
             throws OptimizerException {
-        // Insert a split and its corresponding SplitOutput nodes into the plan
-        // between node 0 and 1 / 2.
-        String scope = nodes.get(0).getOperatorKey().scope;
-        NodeIdGenerator idGen = NodeIdGenerator.getGenerator();
-        LOSplit splitOp = new LOSplit(mPlan, new OperatorKey(scope, 
-                idGen.getNextNodeId(scope)), new ArrayList<LogicalOperator>());
-        splitOp.setAlias(nodes.get(0).getAlias());
+        if((nodes == null) || (nodes.size() <= 0)) {
+            int errCode = 2052;
+            String msg = "Internal error. Cannot retrieve operator from null or empty list.";
+            throw new OptimizerException(msg, errCode, PigException.BUG);
+        }
         try {
+            // Insert a split and its corresponding SplitOutput nodes into the plan
+            // between node 0 and 1 / 2.
+            String scope = nodes.get(0).getOperatorKey().scope;
+            NodeIdGenerator idGen = NodeIdGenerator.getGenerator();
+            LOSplit splitOp = new LOSplit(mPlan, new OperatorKey(scope, 
+                    idGen.getNextNodeId(scope)), new ArrayList<LogicalOperator>());
+            splitOp.setAlias(nodes.get(0).getAlias());
             mPlan.add(splitOp);
             
             // Find all the successors and connect appropriately with split
@@ -143,7 +158,9 @@
             }
             
         } catch (Exception e) {
-            throw new OptimizerException(e);
+            int errCode = 2047;
+            String msg = "Internal error. Unable to introduce split operators.";
+            throw new OptimizerException(msg, errCode, PigException.BUG, e);
         }
     }
 }

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/OpLimitOptimizer.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/OpLimitOptimizer.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/OpLimitOptimizer.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/OpLimitOptimizer.java Fri Jan 23 23:07:30 2009
@@ -23,7 +23,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.pig.impl.logicalLayer.FrontendException;
+import org.apache.pig.PigException;
 import org.apache.pig.impl.logicalLayer.LOCogroup;
 import org.apache.pig.impl.logicalLayer.LOCross;
 import org.apache.pig.impl.logicalLayer.LODistinct;
@@ -38,7 +38,6 @@
 import org.apache.pig.impl.logicalLayer.LogicalOperator;
 import org.apache.pig.impl.logicalLayer.LogicalPlan;
 import org.apache.pig.impl.plan.DepthFirstWalker;
-import org.apache.pig.impl.plan.VisitorException;
 import org.apache.pig.impl.plan.optimizer.OptimizerException;
 
 /**
@@ -60,26 +59,53 @@
 
     @Override
     public boolean check(List<LogicalOperator> nodes) throws OptimizerException {
-        LogicalOperator lo = nodes.get(0);
-        if (lo == null || !(lo instanceof LOLimit)) {
-            throw new RuntimeException("Expected limit, got " +
-                lo.getClass().getName());
+        if((nodes == null) || (nodes.size() <= 0)) {
+            int errCode = 2052;
+            String msg = "Internal error. Cannot retrieve operator from null or empty list.";
+            throw new OptimizerException(msg, errCode, PigException.BUG);
+        }
+        
+        try {
+            LogicalOperator lo = nodes.get(0);
+            if (lo == null || !(lo instanceof LOLimit)) {
+                int errCode = 2005;
+                String msg = "Expected " + LOLimit.class.getSimpleName() + ", got " + (lo == null ? "null" : lo.getClass().getSimpleName());
+                throw new OptimizerException(msg, errCode, PigException.BUG);
+            }
+        } catch (Exception e) {
+            int errCode = 2049;
+            String msg = "Error while performing checks to optimize limit operator.";
+            throw new OptimizerException(msg, errCode, PigException.BUG, e);
         }
 
         return true;
     }
 
     @Override
-    public void transform(List<LogicalOperator> nodes) throws OptimizerException {
-        LogicalOperator lo = nodes.get(0);
-        if (lo == null || !(lo instanceof LOLimit)) {
-            throw new RuntimeException("Expected limit, got " +
-                lo.getClass().getName());
+    public void transform(List<LogicalOperator> nodes) throws OptimizerException {        
+        if((nodes == null) || (nodes.size() <= 0)) {
+            int errCode = 2052;
+            String msg = "Internal error. Cannot retrieve operator from null or empty list.";
+            throw new OptimizerException(msg, errCode, PigException.BUG);
+        }
+        try {
+            LogicalOperator lo = nodes.get(0);
+            if (lo == null || !(lo instanceof LOLimit)) {
+                int errCode = 2005;
+                String msg = "Expected " + LOLimit.class.getSimpleName() + ", got " + (lo == null ? "null" : lo.getClass().getSimpleName());
+                throw new OptimizerException(msg, errCode, PigException.BUG);
+            }
+
+            LOLimit limit = (LOLimit)lo;
+            
+            processNode(limit);
+        } catch (OptimizerException oe) {
+            throw oe;
+        } catch (Exception e) {
+            int errCode = 2050;
+            String msg = "Internal error. Unable to optimize limit operator.";
+            throw new OptimizerException(msg, errCode, PigException.BUG, e);
         }
-
-        LOLimit limit = (LOLimit)lo;
-        
-        processNode(limit);
     }
     
     // We recursively optimize a LOLimit, until one of the following conditions occurs:
@@ -89,97 +115,121 @@
     //    and recursively optimize the new LOLimit
     public void processNode(LOLimit limit) throws OptimizerException
     {
-    	List<LogicalOperator> predecessors = mPlan.getPredecessors(limit);
-        if (predecessors.size()!=1)
-        	throw new OptimizerException("Limit have zero or more than one inputs");
-        LogicalOperator predecessor = predecessors.get(0);
-        
-        // Limit cannot be pushed up
-        if (predecessor instanceof LOCogroup || predecessor instanceof LOFilter ||
-        		predecessor instanceof LOLoad || predecessor instanceof LOSplit ||
-        		predecessor instanceof LOSplitOutput || predecessor instanceof LODistinct)
-        {
-        	return;
-        }
-        // Limit can be pushed in front of ForEach if it does not have a flatten
-        else if (predecessor instanceof LOForEach)
-        {
-        	LOForEach loForEach = (LOForEach)predecessor;
-        	List<Boolean> mFlatten = loForEach.getFlatten();
-        	boolean hasFlatten = false;
-        	for (Boolean b:mFlatten)
-        		if (b.equals(true)) hasFlatten = true;
-        	
-        	// We can safely move LOLimit up
-        	if (!hasFlatten)
-        	{
-				// Get operator before LOFilter
-				LogicalOperator prepredecessor = mPlan.getPredecessors(predecessor).get(0);
-				if (prepredecessor!=null)
-				{
-						try {
-							removeFromChain(limit, null);
-							insertBetween(prepredecessor, limit, predecessor, null);
-							
-						} catch (Exception e) {
-							throw new OptimizerException("Can not move LOLimit up", e);
-						}
-				}
-				else
-				{
-					throw new OptimizerException("LOFilter should have one input");
-				}
-	            // we can move LOLimit even further, recursively optimize LOLimit
-	            processNode(limit);
-        	}
-        }
-        // Limit can be duplicated, and the new instance pushed in front of an operator for the following operators 
-        // (that is, if you have X->limit, you can transform that to limit->X->limit):
-        else if (predecessor instanceof LOCross || predecessor instanceof LOUnion)
-        {
-        	LOLimit newLimit = null;
-			List<LogicalOperator> nodesToProcess = new ArrayList<LogicalOperator>();
-			for (LogicalOperator prepredecessor:mPlan.getPredecessors(predecessor))
-				nodesToProcess.add(prepredecessor);
-			for (LogicalOperator prepredecessor:nodesToProcess)
-			{
-				try {
-					newLimit = (LOLimit)limit.duplicate();
-					insertBetween(prepredecessor, newLimit, predecessor, null);
-				} catch (Exception e) {
-					throw new OptimizerException("Can not insert LOLimit clone", e);
-				}
-				// we can move the new LOLimit even further, recursively optimize LOLimit
-				processNode(newLimit);
-			}
-        }
-        // Limit can be merged into LOSort, result a "limited sort"
-        else if (predecessor instanceof LOSort)
-        {
-        	LOSort sort = (LOSort)predecessor;
-        	if (sort.getLimit()==-1)
-        		sort.setLimit(limit.getLimit());
-        	else
-        	    sort.setLimit(sort.getLimit()<limit.getLimit()?sort.getLimit():limit.getLimit());
-        	try {
-				removeFromChain(limit, null);
-			} catch (Exception e) {
-				throw new OptimizerException("Can not remove LOLimit after LOSort", e);
-			}
-        }
-        // Limit is merged into another LOLimit
-        else if (predecessor instanceof LOLimit)
-        {
-        	LOLimit beforeLimit = (LOLimit)predecessor;
-        	beforeLimit.setLimit(beforeLimit.getLimit()<limit.getLimit()?beforeLimit.getLimit():limit.getLimit());
-        	try {
-				removeFromChain(limit, null);
-			} catch (Exception e) {
-				throw new OptimizerException("Can not remove LOLimit after LOLimit", e);
-			}
+    	try {
+            List<LogicalOperator> predecessors = mPlan.getPredecessors(limit);
+            if (predecessors.size()!=1) {
+            	int errCode = 2008;
+            	String msg = "Limit requires exactly one input. Found " + predecessors.size() + " inputs.";
+                throw new OptimizerException(msg, errCode, PigException.BUG);
+            }
+            LogicalOperator predecessor = predecessors.get(0);
+            
+            // Limit cannot be pushed up
+            if (predecessor instanceof LOCogroup || predecessor instanceof LOFilter ||
+            		predecessor instanceof LOLoad || predecessor instanceof LOSplit ||
+            		predecessor instanceof LOSplitOutput || predecessor instanceof LODistinct)
+            {
+            	return;
+            }
+            // Limit can be pushed in front of ForEach if it does not have a flatten
+            else if (predecessor instanceof LOForEach)
+            {
+            	LOForEach loForEach = (LOForEach)predecessor;
+            	List<Boolean> mFlatten = loForEach.getFlatten();
+            	boolean hasFlatten = false;
+            	for (Boolean b:mFlatten)
+            		if (b.equals(true)) hasFlatten = true;
+            	
+            	// We can safely move LOLimit up
+            	if (!hasFlatten)
+            	{
+            		// Get operator before LOFilter
+            		LogicalOperator prepredecessor = mPlan.getPredecessors(predecessor).get(0);
+            		if (prepredecessor!=null)
+            		{
+            				try {
+            					removeFromChain(limit, null);
+            					insertBetween(prepredecessor, limit, predecessor, null);
+            					
+            				} catch (Exception e) {
+            				    int errCode = 2009;
+            				    String msg = "Can not move LOLimit up";
+            					throw new OptimizerException(msg, errCode, PigException.BUG, e);
+            				}
+            		}
+            		else
+            		{
+            		    int errCode = 2010;
+            		    String msg = "LOFilter should have one input";
+            			throw new OptimizerException(msg, errCode, PigException.BUG);
+            		}
+                    // we can move LOLimit even further, recursively optimize LOLimit
+                    processNode(limit);
+            	}
+            }
+            // Limit can be duplicated, and the new instance pushed in front of an operator for the following operators 
+            // (that is, if you have X->limit, you can transform that to limit->X->limit):
+            else if (predecessor instanceof LOCross || predecessor instanceof LOUnion)
+            {
+            	LOLimit newLimit = null;
+            	List<LogicalOperator> nodesToProcess = new ArrayList<LogicalOperator>();
+            	for (LogicalOperator prepredecessor:mPlan.getPredecessors(predecessor))
+            		nodesToProcess.add(prepredecessor);
+            	for (LogicalOperator prepredecessor:nodesToProcess)
+            	{
+            		try {
+            			newLimit = (LOLimit)limit.duplicate();
+            			insertBetween(prepredecessor, newLimit, predecessor, null);
+            		} catch (Exception e) {
+            		    int errCode = 2011;
+            		    String msg = "Can not insert LOLimit clone";
+            			throw new OptimizerException(msg, errCode, PigException.BUG, e);
+            		}
+            		// we can move the new LOLimit even further, recursively optimize LOLimit
+            		processNode(newLimit);
+            	}
+            }
+            // Limit can be merged into LOSort, result a "limited sort"
+            else if (predecessor instanceof LOSort)
+            {
+            	LOSort sort = (LOSort)predecessor;
+            	if (sort.getLimit()==-1)
+            		sort.setLimit(limit.getLimit());
+            	else
+            	    sort.setLimit(sort.getLimit()<limit.getLimit()?sort.getLimit():limit.getLimit());
+            	try {
+            		removeFromChain(limit, null);
+            	} catch (Exception e) {
+            	    int errCode = 2012;
+            	    String msg = "Can not remove LOLimit after LOSort";
+            		throw new OptimizerException(msg, errCode, PigException.BUG, e);
+            	}
+            }
+            // Limit is merged into another LOLimit
+            else if (predecessor instanceof LOLimit)
+            {
+            	LOLimit beforeLimit = (LOLimit)predecessor;
+            	beforeLimit.setLimit(beforeLimit.getLimit()<limit.getLimit()?beforeLimit.getLimit():limit.getLimit());
+            	try {
+            		removeFromChain(limit, null);
+            	} catch (Exception e) {
+            	    int errCode = 2012;
+            	    String msg = "Can not remove LOLimit after LOLimit";
+            		throw new OptimizerException(msg, errCode, PigException.BUG, e);
+            	}
+            }
+            else {
+                int errCode = 2013;
+                String msg = "Moving LOLimit in front of " + predecessor.getClass().getSimpleName() + " is not implemented";
+            	throw new OptimizerException(msg, errCode, PigException.BUG);
+            }
+    	} catch (OptimizerException oe) {
+    	    throw oe;
+        } catch (Exception e) {
+            int errCode = 2050;
+            String msg = "Internal error. Unable to optimize limit operator.";
+            throw new OptimizerException(msg, errCode, PigException.BUG, e);
         }
-        else
-        	throw new OptimizerException("Move LOLimit in front of " + predecessor.getClass() + " is not implemented");
     }
 }
 

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/StreamOptimizer.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/StreamOptimizer.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/StreamOptimizer.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/StreamOptimizer.java Fri Jan 23 23:07:30 2009
@@ -23,6 +23,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.pig.FuncSpec;
 import org.apache.pig.LoadFunc;
+import org.apache.pig.PigException;
 import org.apache.pig.ReversibleLoadStoreFunc;
 import org.apache.pig.StoreFunc;
 import org.apache.pig.builtin.BinaryStorage;
@@ -65,10 +66,17 @@
     public boolean check(List<LogicalOperator> nodes) throws OptimizerException {
         mOptimizeLoad = false;
         mOptimizeStore = false;
+        if((nodes == null) || (nodes.size() <= 0)) {
+            int errCode = 2052;
+            String msg = "Internal error. Cannot retrieve operator from null or empty list.";
+            throw new OptimizerException(msg, errCode, PigException.BUG);
+        }
+
         LogicalOperator lo = nodes.get(0);
         if (lo == null || !(lo instanceof LOStream)) {
-            throw new RuntimeException("Expected stream, got " +
-                lo.getClass().getName());
+            int errCode = 2005;
+            String msg = "Expected " + LOStream.class.getSimpleName() + ", got " + lo.getClass().getSimpleName();
+            throw new OptimizerException(msg, errCode, PigException.BUG);            
         }
         LOStream stream = (LOStream)lo;
         
@@ -82,7 +90,11 @@
     }
     
     private void checkLoadOptimizable(LOStream stream) {
-        LogicalOperator predecessor = mPlan.getPredecessors(stream).get(0);
+        List<LogicalOperator> predecessors = mPlan.getPredecessors(stream);
+        if((predecessors == null) || (predecessors.size() <= 0)) {
+            return;
+        }
+        LogicalOperator predecessor = predecessors.get(0);
         if(predecessor instanceof LOLoad) {
             LOLoad load = (LOLoad)predecessor;
             if(!load.isSplittable()) {
@@ -186,6 +198,12 @@
     
     @Override
     public void transform(List<LogicalOperator> nodes) throws OptimizerException {
+        if((nodes == null) || (nodes.size() <= 0)) {
+            int errCode = 2052;
+            String msg = "Internal error. Cannot retrieve operator from null or empty list.";
+            throw new OptimizerException(msg, errCode, PigException.BUG);
+        }
+        
         try {
             LogicalOperator lo = nodes.get(0);
             if (lo == null || !(lo instanceof LOStream)) {
@@ -208,8 +226,9 @@
                 stream.setOptimizedSpec(Handle.OUTPUT, BinaryStorage.class.getName());
             }
         } catch (Exception e) {
-            throw new OptimizerException(
-                "Unable to optimize load-stream-store optimization", e);
+            int errCode = 2014;
+            String msg = "Unable to optimize load-stream-store optimization"; 
+            throw new OptimizerException(msg, errCode, PigException.BUG, e);
         }
     }
 }

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/TypeCastInserter.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/TypeCastInserter.java?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/TypeCastInserter.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/optimizer/TypeCastInserter.java Fri Jan 23 23:07:30 2009
@@ -32,16 +32,13 @@
 import org.apache.pig.impl.logicalLayer.LOStream;
 import org.apache.pig.impl.logicalLayer.LogicalOperator;
 import org.apache.pig.impl.logicalLayer.LogicalPlan;
-import org.apache.pig.impl.logicalLayer.parser.ParseException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.plan.DepthFirstWalker;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.plan.optimizer.OptimizerException;
 import org.apache.pig.FuncSpec;
-import org.apache.pig.LoadFunc;
-import org.apache.pig.impl.PigContext;
+import org.apache.pig.PigException;
 import org.apache.pig.impl.streaming.StreamingCommand;
-import org.apache.pig.impl.streaming.StreamingCommand.Handle;
 import org.apache.pig.impl.streaming.StreamingCommand.HandleSpec;
 
 /**
@@ -91,31 +88,44 @@
 
             // If all we've found are byte arrays, we don't need a projection.
             return sawOne;
+        } catch(OptimizerException oe) {
+            throw oe;
         } catch (Exception e) {
-            throw new OptimizerException("Caught exception while trying to " +
-                " check if type casts are needed", e);
+            int errCode = 2004;
+            String msg = "Internal error while trying to check if type casts are needed";
+            throw new OptimizerException(msg, errCode, PigException.BUG, e);
         }
     }
     
     private LogicalOperator getOperator(List<LogicalOperator> nodes) throws FrontendException {
+        if((nodes == null) || (nodes.size() <= 0)) {
+            int errCode = 2052;
+            String msg = "Internal error. Cannot retrieve operator from null or empty list.";
+            throw new OptimizerException(msg, errCode, PigException.BUG);
+        }
+        
         LogicalOperator lo = nodes.get(0);
         if(operatorClassName == LogicalOptimizer.LOLOAD_CLASSNAME) {
             if (lo == null || !(lo instanceof LOLoad)) {
-                throw new RuntimeException("Expected load, got " +
-                    lo.getClass().getName());
+                int errCode = 2005;
+                String msg = "Expected " + LOLoad.class.getSimpleName() + ", got " + lo.getClass().getSimpleName();
+                throw new OptimizerException(msg, errCode, PigException.BUG);
             }
     
             return lo;
         } else if(operatorClassName == LogicalOptimizer.LOSTREAM_CLASSNAME){
             if (lo == null || !(lo instanceof LOStream)) {
-                throw new RuntimeException("Expected stream, got " +
-                    lo.getClass().getName());
+                int errCode = 2005;
+                String msg = "Expected " + LOStream.class.getSimpleName() + ", got " + lo.getClass().getSimpleName();
+                throw new OptimizerException(msg, errCode, PigException.BUG);
             }
     
             return lo;
         } else {
             // we should never be called with any other operator class name
-            throw new FrontendException("TypeCastInserter invoked with an invalid operator class name:" + operatorClassName);
+            int errCode = 1034;
+            String msg = "TypeCastInserter invoked with an invalid operator class name:" + operatorClassName;
+            throw new OptimizerException(msg, errCode, PigException.INPUT);
         }
    
     }
@@ -169,7 +179,9 @@
                                 HandleSpec streamOutputSpec = command.getOutputSpec(); 
                                 loadFuncSpec = new FuncSpec(streamOutputSpec.getSpec());
                             } else {
-                                throw new OptimizerException("TypeCastInserter invoked with an invalid operator class name:" + lo.getClass().getSimpleName());
+                                int errCode = 2006;
+                                String msg = "TypeCastInserter invoked with an invalid operator class name: " + lo.getClass().getSimpleName();
+                                throw new OptimizerException(msg, errCode, PigException.BUG);
                             }
                             cast.setLoadFuncSpec(loadFuncSpec);
                             typeChanges.put(fs.canonicalName, fs.type);
@@ -195,9 +207,12 @@
 
             rebuildSchemas();
 
+        } catch (OptimizerException oe) {
+            throw oe;
         } catch (Exception e) {
-            throw new OptimizerException(
-                "Unable to insert type casts into plan", e);
+            int errCode = 2007;
+            String msg = "Unable to insert type casts into plan"; 
+            throw new OptimizerException(msg, errCode, PigException.BUG, e);
         }
     }
 }

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt?rev=737239&r1=737238&r2=737239&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt Fri Jan 23 23:07:30 2009
@@ -59,6 +59,7 @@
 import org.apache.pig.StoreFunc;
 import org.apache.pig.FuncSpec;
 import org.apache.pig.impl.plan.VisitorException;
+import org.apache.pig.PigException;
 
 
 public class QueryParser {
@@ -137,7 +138,7 @@
 							   new OperatorKey(scope, storeNodeId),
                                new FileSpec(fileName, new FuncSpec(func)));
         } catch (IOException ioe) {
-            throw new FrontendException(ioe.getMessage());
+            throw new FrontendException(ioe.getMessage(), ioe);
         }
         
         try {
@@ -146,7 +147,7 @@
 	        storePlan.connect(input, store);
 	        attachPlan(storePlan, input, readFrom, new HashMap<LogicalOperator, Boolean>());
         } catch (ParseException pe) {
-            throw new FrontendException(pe.getMessage());
+            throw new FrontendException(pe.getMessage(), pe);
         }
 	    
         if (storePlan.getRoots().size() == 0) throw new RuntimeException("Store plan has no roots!");
@@ -421,7 +422,9 @@
      		try {
      		    command.addPathToShip(argPath);
      		} catch(IOException e) {
-                throw new ParseException(e.getMessage());
+                ParseException pe = new ParseException(e.getMessage());
+                pe.initCause(e);
+                throw pe;
             }
      	}
      	 
@@ -578,7 +581,9 @@
                 lp.connect(rootPred, root);
                 log.debug("Connected operator " + rootPred + " to " + root + " in the logical plan " + lp);
             } catch (FrontendException fee) {
-                throw new ParseException(fee.getMessage());
+                ParseException pe = new ParseException(fee.getMessage());
+                pe.initCause(fee); 
+                throw pe;
             }
         }
         log.trace("Exiting attachPlan");
@@ -842,7 +847,9 @@
             try {
 			    log.debug("Root: " + root.getClass().getName() + " schema: " + root.getSchema());
             } catch(FrontendException fee) {
-                throw new ParseException(fee.getMessage());
+            	ParseException pe = new ParseException(fee.getMessage());
+            	pe.initCause(fee);  
+                throw pe;
             }
 		}
 
@@ -1609,7 +1616,9 @@
 			    throw new ParseException("Invalid alias: " + t.image + " in " + over);
 		    } 
         } catch (FrontendException fee) {
-            throw new ParseException(fee.getMessage());
+        	ParseException pe = new ParseException(fee.getMessage());
+        	pe.initCause(fee);
+            throw pe;
         }
 		
 		log.trace("Exiting ColNameOrNum");
@@ -1770,8 +1779,6 @@
             try {
                 generatePlanClone = lpCloner.getClonedPlan();
             } catch (CloneNotSupportedException cnse) {
-                log.error(cnse.getMessage());
-                cnse.printStackTrace();
                 ParseException pe = new ParseException("Not able to clone foreach plan");
                 pe.initCause(cnse);
                 throw pe;
@@ -1780,7 +1787,9 @@
             try {
                 removeOperators.visit();
             } catch (VisitorException ve) {
-                throw new ParseException("Could not remove redundant operators in foreach plan.").initCause(ve);
+            	ParseException pe = new ParseException("Could not remove redundant operators in foreach plan.");
+                pe.initCause(ve);
+                throw pe;
             }
             foreachPlans.add(generatePlanClone);
         }
@@ -1794,8 +1803,9 @@
 			lp.connect(input, foreach);
 			log.debug("Connected alias " + input.getAlias() + " operator " + input.getClass().getName() + " object " + input + " to operator " + foreach.getClass().getName() + " in the logical plan");
 		} catch (PlanException planException) {
-			ParseException parseException = new ParseException(planException.getMessage());
-			throw parseException;
+			ParseException pe = new ParseException(planException.getMessage());
+			pe.initCause(planException);
+			throw pe;
 		}
 		
 		log.trace("Exiting ForEachClause");
@@ -1866,7 +1876,9 @@
                     	try {
                             command.addPathToShip(path);
                         } catch(IOException e) {
-                            throw new ParseException(e.getMessage());
+                        	ParseException pe = new ParseException(e.getMessage());
+                        	pe.initCause(e); 
+                            throw pe;
                         }
                     }
                 }
@@ -1878,7 +1890,9 @@
                     try {
                         command.addPathToCache(path);
                     } catch(IOException e) {
-                        throw new ParseException(e.getMessage());
+                    	ParseException pe = new ParseException(e.getMessage());
+                    	pe.initCause(e); 
+                        throw pe;
                     }
                 }
             }
@@ -2088,7 +2102,9 @@
 				    throw new ParseException("Invalid alias: " + t.image + " in " + over);
 			    }
             } catch (FrontendException fee) {
-                throw new ParseException(fee.getMessage());
+            	ParseException pe = new ParseException(fee.getMessage());
+            	pe.initCause(fee); 
+                throw pe;
             }
             foreachInput = new LOProject(lp, new OperatorKey(scope, getNextId()), input, i);
         }
@@ -2100,8 +2116,9 @@
                 lp.connect(input, foreachInput);
             }
         } catch (Exception planException) {
-            ParseException parseException = new ParseException(planException.getMessage());
-            throw parseException;
+        	ParseException pe = new ParseException(planException.getMessage());
+            pe.initCause(planException);
+            throw pe;
         }
     }
 |   t = <DOLLARVAR>
@@ -2236,8 +2253,9 @@
 			lp.connect(eOp, sort);
 			log.debug("Connected alias " + eOp.getAlias() + " operator " + eOp.getClass().getName() + " to operator " + sort.getClass().getName() + " the logical plan");
 		} catch (PlanException planException) {
-			ParseException parseException = new ParseException(planException.getMessage());
-			throw parseException;
+			ParseException pe = new ParseException(planException.getMessage());
+			pe.initCause(planException); 
+			throw pe;
 		}
 		
 		log.trace("Exiting NestedSortOrArrange");
@@ -2629,7 +2647,9 @@
 		try{
             FunctionType.tryCasting(func, funcType);
 		} catch (Exception e){
-			throw new ParseException(e.getMessage());
+			ParseException pe = new ParseException(e.getMessage());
+			pe.initCause(e); 
+			throw pe;
 		}
     }
     )
@@ -2688,7 +2708,9 @@
 		try{
             FunctionType.tryCasting(func, funcType);
 		} catch (Exception e){
-			throw new ParseException(e.getMessage());
+			ParseException pe = new ParseException(e.getMessage());
+			pe.initCause(e);
+			throw pe;
 		}
     }
     )
@@ -2711,12 +2733,13 @@
             case FunctionType.COMPARISONFUNC:
             case FunctionType.LOADFUNC:
             case FunctionType.STOREFUNC:
-                //funcSpec = new FuncSpec(func.getClass().getName() + (functionArgs == null? "(" + ")" : "(" + functionArgs + ")"));
                 func = pigContext.instantiateFuncFromSpec(funcSpec);
 		        try{
                     FunctionType.tryCasting(func, funcType);
 		        } catch (Exception e){
-			        throw new ParseException(e.getMessage());
+		        	ParseException pe = new ParseException(e.getMessage());
+		        	pe.initCause(e); 
+			        throw pe;
 		        }
                 break;
             default:
@@ -2975,7 +2998,12 @@
 	(  ( type = BasicType() )
 		{ 
             if(type == DataType.BYTEARRAY) {
-                throw new ParseException("Cannot cast to bytearray");
+            	int errCode = 1051;
+            	String msg = "Cannot cast to bytearray";
+                FrontendException fee = new FrontendException(msg, errCode, PigException.INPUT);
+                ParseException pe = new ParseException(msg);
+                pe.initCause(fee);
+                throw pe;
             }
 			fs = new Schema.FieldSchema(null, type); 
 			
@@ -3082,7 +3110,9 @@
 		try{
             FunctionType.tryCasting(func, funcType);
 		}catch (Exception e){
-			throw new ParseException(e.getMessage());
+			ParseException pe = new ParseException(e.getMessage());
+			pe.initCause(e);
+			throw pe;
 		}
 		log.trace("Exiting EvalFunction");
 		
@@ -3334,8 +3364,9 @@
 			    lp.connect(eOp, project);
             }
 		} catch (Exception planException) {
-			ParseException parseException = new ParseException(planException.getMessage());
-			throw parseException;
+			ParseException pe = new ParseException(planException.getMessage());
+			pe.initCause(planException); 
+			throw pe;
 		}
 		log.trace("Exiting DollarVar");
 		return project;
@@ -3382,8 +3413,9 @@
 					    lp.connect(op, item);
                     }
 				} catch (Exception planException) {
-					ParseException parseException = new ParseException(planException.getMessage());
-					throw parseException;
+					ParseException pe = new ParseException(planException.getMessage());
+					pe.initCause(planException);
+					throw pe;
 				}
 			}
 		}
@@ -3401,7 +3433,9 @@
 				    throw new ParseException("Invalid alias: " + t1.image + " in " + over);
 			    }
             } catch (FrontendException fee) {
-                throw new ParseException(fee.getMessage());
+            	ParseException pe = new ParseException(fee.getMessage());
+            	pe.initCause(fee);
+                throw pe;
             }
 			log.debug("Position of " + t1.image + " = " + i);
 			if(null != over) {
@@ -3419,6 +3453,7 @@
                 }
 			} catch (Exception planException) {
 				ParseException parseException = new ParseException(planException.getMessage());
+				parseException.initCause(planException);
 				throw parseException;
 			}
 		}