Posted to commits@pig.apache.org by ga...@apache.org on 2008/02/27 01:52:08 UTC

svn commit: r631443 [1/10] - in /incubator/pig/branches/types: ./ lib-src/bzip2/org/apache/tools/bzip2r/ lib-src/shock/org/apache/pig/shock/ lib/ scripts/ src/org/apache/pig/ src/org/apache/pig/backend/ src/org/apache/pig/backend/datastorage/ src/org/a...

Author: gates
Date: Tue Feb 26 16:51:49 2008
New Revision: 631443

URL: http://svn.apache.org/viewvc?rev=631443&view=rev
Log:
Merged in changes from main branch.


Added:
    incubator/pig/branches/types/src/org/apache/pig/backend/
    incubator/pig/branches/types/src/org/apache/pig/backend/datastorage/
    incubator/pig/branches/types/src/org/apache/pig/backend/datastorage/ContainerDescriptor.java
    incubator/pig/branches/types/src/org/apache/pig/backend/datastorage/DataStorage.java
    incubator/pig/branches/types/src/org/apache/pig/backend/datastorage/DataStorageException.java
    incubator/pig/branches/types/src/org/apache/pig/backend/datastorage/ElementDescriptor.java
    incubator/pig/branches/types/src/org/apache/pig/backend/datastorage/ImmutableOutputStream.java
    incubator/pig/branches/types/src/org/apache/pig/backend/datastorage/SeekableInputStream.java
    incubator/pig/branches/types/src/org/apache/pig/backend/executionengine/
    incubator/pig/branches/types/src/org/apache/pig/backend/executionengine/ExecException.java
    incubator/pig/branches/types/src/org/apache/pig/backend/executionengine/ExecJob.java
    incubator/pig/branches/types/src/org/apache/pig/backend/executionengine/ExecLogicalPlan.java
    incubator/pig/branches/types/src/org/apache/pig/backend/executionengine/ExecPhysicalOperator.java
    incubator/pig/branches/types/src/org/apache/pig/backend/executionengine/ExecPhysicalPlan.java
    incubator/pig/branches/types/src/org/apache/pig/backend/executionengine/ExecScopedLogicalOperator.java
    incubator/pig/branches/types/src/org/apache/pig/backend/executionengine/ExecutionEngine.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/datastorage/
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/datastorage/HConfiguration.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/datastorage/HDataStorage.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/datastorage/HDirectory.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/datastorage/HFile.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/datastorage/HPath.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/datastorage/HSeekableInputStream.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/HJob.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/MapRedPhysicalPlan.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/MapRedResult.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/MapreducePlanCompiler.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/POMapreduce.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/SplitSpec.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/MapReduceLauncher.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/PigCombine.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/PigInputFormat.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/PigMapReduce.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/PigOutputFormat.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/PigSplit.java
    incubator/pig/branches/types/src/org/apache/pig/backend/hadoop/executionengine/mapreduceExec/SortPartitioner.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/
    incubator/pig/branches/types/src/org/apache/pig/backend/local/datastorage/
    incubator/pig/branches/types/src/org/apache/pig/backend/local/datastorage/LocalDataStorage.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/datastorage/LocalDir.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/datastorage/LocalFile.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/datastorage/LocalPath.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/datastorage/LocalSeekableInputStream.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/LocalExecutionEngine.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/LocalJob.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/LocalPhysicalPlan.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/LocalResult.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/POCogroup.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/POEval.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/POLoad.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/POSort.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/POSplit.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/POStore.java
    incubator/pig/branches/types/src/org/apache/pig/backend/local/executionengine/POUnion.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOSplitOutput.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/OperatorKey.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/parser/NodeIdGenerator.java
    incubator/pig/branches/types/src/org/apache/pig/impl/util/WrappedIOException.java
    incubator/pig/branches/types/test/org/apache/pig/test/MiniCluster.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestCombiner.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestGrunt.java
Removed:
    incubator/pig/branches/types/src/org/apache/pig/impl/io/DataBagFileReader.java
    incubator/pig/branches/types/src/org/apache/pig/impl/io/DataBagFileWriter.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LORead.java
    incubator/pig/branches/types/src/org/apache/pig/impl/mapreduceExec/MapReduceLauncher.java
    incubator/pig/branches/types/src/org/apache/pig/impl/mapreduceExec/PigCombine.java
    incubator/pig/branches/types/src/org/apache/pig/impl/mapreduceExec/PigInputFormat.java
    incubator/pig/branches/types/src/org/apache/pig/impl/mapreduceExec/PigMapReduce.java
    incubator/pig/branches/types/src/org/apache/pig/impl/mapreduceExec/PigOutputFormat.java
    incubator/pig/branches/types/src/org/apache/pig/impl/mapreduceExec/PigSplit.java
    incubator/pig/branches/types/src/org/apache/pig/impl/mapreduceExec/SortPartitioner.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/IntermedResult.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/LocalPlanCompiler.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/MapreducePlanCompiler.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POCogroup.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POEval.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POLoad.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POMapreduce.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/PORead.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POSort.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POSplit.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POSplitMaster.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POSplitSlave.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POStore.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POUnion.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/PhysicalPlan.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/SplitSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/util/PigLogger.java
Modified:
    incubator/pig/branches/types/CHANGES.txt
    incubator/pig/branches/types/build.xml
    incubator/pig/branches/types/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java
    incubator/pig/branches/types/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java
    incubator/pig/branches/types/lib/hadoop15.jar
    incubator/pig/branches/types/scripts/pig.pl
    incubator/pig/branches/types/src/org/apache/pig/Algebraic.java
    incubator/pig/branches/types/src/org/apache/pig/EvalFunc.java
    incubator/pig/branches/types/src/org/apache/pig/FilterFunc.java
    incubator/pig/branches/types/src/org/apache/pig/LoadFunc.java
    incubator/pig/branches/types/src/org/apache/pig/Main.java
    incubator/pig/branches/types/src/org/apache/pig/PigServer.java
    incubator/pig/branches/types/src/org/apache/pig/StandAloneParser.java
    incubator/pig/branches/types/src/org/apache/pig/StoreFunc.java
    incubator/pig/branches/types/src/org/apache/pig/builtin/BinStorage.java
    incubator/pig/branches/types/src/org/apache/pig/builtin/COUNT.java
    incubator/pig/branches/types/src/org/apache/pig/builtin/MAX.java
    incubator/pig/branches/types/src/org/apache/pig/builtin/MIN.java
    incubator/pig/branches/types/src/org/apache/pig/builtin/PigDump.java
    incubator/pig/branches/types/src/org/apache/pig/builtin/PigStorage.java
    incubator/pig/branches/types/src/org/apache/pig/builtin/TextLoader.java
    incubator/pig/branches/types/src/org/apache/pig/data/DataBag.java
    incubator/pig/branches/types/src/org/apache/pig/data/DefaultAbstractBag.java
    incubator/pig/branches/types/src/org/apache/pig/data/DefaultDataBag.java
    incubator/pig/branches/types/src/org/apache/pig/data/DistinctDataBag.java
    incubator/pig/branches/types/src/org/apache/pig/data/IndexedTuple.java
    incubator/pig/branches/types/src/org/apache/pig/data/SortedDataBag.java
    incubator/pig/branches/types/src/org/apache/pig/data/TimestampedTuple.java
    incubator/pig/branches/types/src/org/apache/pig/impl/FunctionInstantiator.java
    incubator/pig/branches/types/src/org/apache/pig/impl/PigContext.java
    incubator/pig/branches/types/src/org/apache/pig/impl/builtin/RandomSampleLoader.java
    incubator/pig/branches/types/src/org/apache/pig/impl/builtin/ShellBagEvalFunc.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/BinCondSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/CompositeEvalSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/ConstSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/EvalSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/FilterSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/FuncEvalSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/GenerateSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/MapLookupSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/ProjectSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/SimpleEvalSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/StarSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/collector/DataCollector.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/cond/Cond.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/cond/FalseCond.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/cond/FuncCond.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/cond/NotCond.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/cond/OrCond.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/cond/RegexpCond.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/cond/TrueCond.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/window/TupleWindowSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/eval/window/WindowSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/io/BufferedPositionedInputStream.java
    incubator/pig/branches/types/src/org/apache/pig/impl/io/FileLocalizer.java
    incubator/pig/branches/types/src/org/apache/pig/impl/io/FileSpec.java
    incubator/pig/branches/types/src/org/apache/pig/impl/io/PigFile.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOCogroup.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOEval.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOLoad.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOPrinter.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOSort.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOSplit.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOStore.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOUnion.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LOVisitor.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LogicalOperator.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LogicalPlan.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/LogicalPlanBuilder.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/schema/AtomSchema.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/schema/Schema.java
    incubator/pig/branches/types/src/org/apache/pig/impl/logicalLayer/schema/TupleSchema.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POPrinter.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/POVisitor.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/PhysicalOperator.java
    incubator/pig/branches/types/src/org/apache/pig/impl/physicalLayer/PlanCompiler.java
    incubator/pig/branches/types/src/org/apache/pig/impl/util/DataBuffer.java
    incubator/pig/branches/types/src/org/apache/pig/impl/util/JarManager.java
    incubator/pig/branches/types/src/org/apache/pig/impl/util/ObjectSerializer.java
    incubator/pig/branches/types/src/org/apache/pig/impl/util/Spillable.java
    incubator/pig/branches/types/src/org/apache/pig/impl/util/SpillableMemoryManager.java
    incubator/pig/branches/types/src/org/apache/pig/tools/cmdline/CmdLineParser.java
    incubator/pig/branches/types/src/org/apache/pig/tools/grunt/Grunt.java
    incubator/pig/branches/types/src/org/apache/pig/tools/grunt/GruntParser.java
    incubator/pig/branches/types/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj
    incubator/pig/branches/types/src/org/apache/pig/tools/streams/StreamGenerator.java
    incubator/pig/branches/types/src/org/apache/pig/tools/timer/PerformanceTimer.java
    incubator/pig/branches/types/src/org/apache/pig/tools/timer/PerformanceTimerFactory.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestAlgebraicEval.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestBuiltin.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestCmdLineParser.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestCompressedFiles.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestEvalPipeline.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestFilterOpNumeric.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestFilterOpString.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestInfixArithmetic.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestLargeFile.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestLogicalPlanBuilder.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestMapReduce.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestOrderBy.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestPi.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestPigFile.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestPigServer.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestPigSplit.java
    incubator/pig/branches/types/test/org/apache/pig/test/TestStore.java

Modified: incubator/pig/branches/types/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/CHANGES.txt?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/CHANGES.txt (original)
+++ incubator/pig/branches/types/CHANGES.txt Tue Feb 26 16:51:49 2008
@@ -62,3 +62,71 @@
 	PIG-30: Rewrote DataBags to better handle decisions of when to spill to
 	disk and to spill more intelligently. (gates)
 
+	PIG-61: Fixed MapreducePlanCompiler to use PigContext to load up the
+	comparator function instead of Class.forName.  (gates)
+
+	PIG-56: Made DataBag implement Iterable. (groves via gates)
+
+	PIG-63: Fix for non-ascii UTF-8 data (breed@ and olgan@)
+
+	PIG-77: Added eclipse specific files to svn:ignore
+
+	PIG-57: Fixed NPE in PigContext.fixUpDomain (francisoud via gates)
+
+	PIG-69: NPE in PigContext.setJobtrackerLocation (francisoud via gates)
+
+	PIG-78: src/org/apache/pig/builtin/PigStorage.java doesn't compile (arun
+	via olgan)
+
+	PIG-32: Abstraction layer (olgan)
+
+	PIG-87: Fix pig.pl to find java via JAVA_HOME instead of hardcoded default
+	path.  Also fix it to not die if pigclient.conf is missing. (craigm via
+	gates).
+
+	PIG-89: Fix DefaultDataBag, DistinctDataBag, SortedDataBag to close spill
+	files when they are done spilling (contributions by craigm, breed, and
+	gates, committed by gates).
+
+	PIG-95: Remove System.exit() statements from inside pig (joa23 via gates).
+
+	PIG-65: convert tabs to spaces (groves via olgan)
+
+	PIG-97: Turn off combiner in the case of Cogroup, as it doesn't work when
+	more than one bag is involved (gates).
+
+	PIG-92: Fix NullPointerException in PigContext due to uninitialized conf
+	reference. (francisoud via gates)
+
+	PIG-83: Change everything except grunt and Main (PigServer on down) to use
+	common logging abstraction instead of log4j.  By default in grunt, log4j is
+	still used as the logging layer.  Also converted all System.out/err.println
+	statements to use logging instead. (francisoud via gates)
+
+	PIG-80: In a number of places stack trace information was being lost by an
+	exception being caught, and a different exception then thrown.  All those
+	locations have been changed so that the new exception now wraps the old.
+	(francisoud via gates).
+
+	PIG-84: Converted printStackTrace calls to calls to the logger.
+	(francisoud via gates).
+
+	PIG-88: Remove unused HadoopExe import from Main.  (pi_song via gates).
+
+	PIG-99: Fix to make unit tests not run out of memory. (francisoud via
+	gates).
+
+    PIG-107: enabled several tests. (francisoud via olgan)
+
+    PIG-46: abort processing on error for non-interactive mode (olston via
+    olgan)
+
+    PIG-109: improved exception handling (oae via olgan)
+
+	PIG-72: Move unit tests to use MiniDFS and MiniMR so that unit tests can
+	be run w/o access to a hadoop cluster. (xuzh via gates)
+
+    PIG-68: improvements to build.xml (joa23 via olgan)
+
+	PIG-110: Replaced code accidentally merged out in PIG-32 fix that handled
+	flattening the combiner case. (gates and oae)

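For reference on the PIG-56 entry above (DataBag now implements Iterable), client code can iterate a bag's tuples directly with the enhanced for loop. A minimal sketch, assuming a DataBag obtained elsewhere whose iterator yields Tuple; the class and method names below are illustrative only, not part of Pig:

    import org.apache.pig.data.DataBag;
    import org.apache.pig.data.Tuple;

    public class BagDump {
        // Print every tuple in a bag; the for-each loop compiles because
        // DataBag implements Iterable over its tuples (PIG-56).
        public static void dump(DataBag bag) {
            for (Tuple t : bag) {
                System.out.println(t);
            }
        }
    }
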
Modified: incubator/pig/branches/types/build.xml
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/build.xml?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/build.xml (original)
+++ incubator/pig/branches/types/build.xml Tue Feb 26 16:51:49 2008
@@ -1,150 +1,286 @@
 <project name="Pig" default="jar">
 
-	<!-- properties -->
-	<property name="dist.dir" value="${basedir}/dist" />
-	<property name="lib.dir" value="${basedir}/lib" />
-	<property name="src.dir" value="${basedir}/src" />
-	<property name="doc.dir" value="${basedir}/doc" />
-        <property name="test.reports.dir" value="${basedir}/test/reports" />
-	<property name="shock.src.dir" value="${basedir}/lib-src/shock" />
-	<property name="bzip2.src.dir" value="${basedir}/lib-src/bzip2" />
-	<property name="test.src.dir" value="${basedir}/test" />
-	<property name="output.jarfile" value="pig.jar" />
-	<property name="hadoop.jarfile" value="hadoop15.jar"/>
-	<property name="ssh.gateway" value=""/>
-	<property name="hod.server" value=""/>
-	<property name="hod.command" value=""/>
-	<property name="hod.param" value=""/>
-	<property name="test.junit.output.format" value="plain"/>
-	<property name="junit.hadoop.conf" value=""/>
-        	
-	<!-- setup the classpath -->
-	<path id="classpath">
-		<fileset file="${lib.dir}/${hadoop.jarfile}" />
-		<fileset dir="${lib.dir}" />
-	</path>
-
-	<target name="clean">
-		<delete dir="${dist.dir}" />
-		<delete dir="${doc.dir}" />
-                <delete dir="${test.reports.dir}" />
-		<delete file="${output.jarfile}" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/QueryParser.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/JJTQueryParserState.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/Node.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/ParseException.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/QueryParser.jj" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/QueryParserConstants.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/QueryParserTokenManager.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/QueryParserTreeConstants.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/SimpleCharStream.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/SimpleNode.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/Token.java" />
-		<delete file="${src.dir}/org/apache/pig/impl/logicalLayer/parser/TokenMgrError.java" />
-		<delete file="${src.dir}/org/apache/pig/tools/pigscript/parser/PigScriptParser.java" />
-		<delete file="${src.dir}/org/apache/pig/tools/pigscript/parser/PigScriptParserConstants.java" />
-		<delete file="${src.dir}/org/apache/pig/tools/pigscript/parser/PigScriptParserTokenManager.java" />
-		<delete file="${src.dir}/org/apache/pig/tools/pigscript/parser/ParseException.java" />
-		<delete file="${src.dir}/org/apache/pig/tools/pigscript/parser/SimpleCharStream.java" />
-		<delete file="${src.dir}/org/apache/pig/tools/pigscript/parser/Token.java" />
-		<delete file="${src.dir}/org/apache/pig/tools/pigscript/parser/TokenMgrError.java" />
-	</target>
-
-    <target name="depend">
-        <mkdir dir="depend"/>
-        <echo>*** Resolving dependencies ***</echo>
-        <depend srcdir="src;lib-src/shock;lib-src/bzip2" destdir="dist" cache="depend"/>
-    </target>
-
-	<target name="compile" depends="depend, cc-compile, lib-compile">
-		<mkdir dir="${dist.dir}" />
-		<echo>*** Building Main Sources ***</echo>
-		<javac srcdir="${src.dir};${shock.src.dir};${bzip2.src.dir}" destdir="${dist.dir}" target="1.5" debug="on" deprecation="on">
-			<classpath refid="classpath" />
-			<!--<compilerarg value="-Xlint:unchecked"/> -->
-		</javac>
-		<echo>*** Building Test Sources ***</echo>
-		<javac srcdir="test" destdir="${dist.dir}" debug="on">
-			<classpath refid="classpath" />
-			<compilerarg value="-Xlint:unchecked"/>
-		</javac>
-	</target>
-
-	<target name="cc-compile">
-		<jjtree 
-			target="${src.dir}/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt" 
-			outputdirectory="${src.dir}/org/apache/pig/impl/logicalLayer/parser"
-			javacchome="${basedir}/lib" />
-		<javacc
-			target="${src.dir}/org/apache/pig/impl/logicalLayer/parser/QueryParser.jj" 
-			outputdirectory="${src.dir}/org/apache/pig/impl/logicalLayer/parser"
-			javacchome="${basedir}/lib" />
-		<javacc
-			target="${src.dir}/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj" 
-			outputdirectory="${src.dir}/org/apache/pig/tools/pigscript/parser"
-			javacchome="${basedir}/lib" />
-	</target>
-
-        <target name="lib-compile">
-		<mkdir dir="${dist.dir}" />
-		<echo>*** Building Library Sources ***</echo>
-		<javac srcdir="${shock.src.dir};${bzip2.src.dir}" destdir="${dist.dir}" target="1.5" debug="on">
-			<classpath refid="classpath" />
-		</javac>
-	</target>
-
-	<target name="jar" depends="compile">
-		<jar duplicate="preserve" jarfile="${output.jarfile}" basedir="${dist.dir}">
-			<manifest>
-				<attribute name="Main-Class" value="org.apache.pig.Main"/>
-			</manifest>
-			<zipfileset src="${lib.dir}/junit-4.1.jar"/>								
-			<zipfileset src="${lib.dir}/${hadoop.jarfile}"/>
-			<zipfileset src="${lib.dir}/jsch-0.1.33.jar"/>
-		</jar>
-	</target>
-
-	<target name="test">
-		<mkdir dir="test/reports"/>
-                <junit printsummary="on" haltonfailure="no"
-                       errorProperty="tests.failed" failureProperty="tests.failed">
-			<sysproperty key="ssh.gateway" value="${ssh.gateway}"/>
-			<sysproperty key="hod.server" value="${hod.server}"/>
-			<!-- <sysproperty key="hod.command" value="${hod.command}"/>
-			<sysproperty key="hod.param" value="${hod.param}"/> -->
-			<classpath>
-				<pathelement location="${output.jarfile}"/>
-				<pathelement location="${junit.hadoop.conf}"/>
-				<fileset dir="${lib.dir}">
-					<include name="**/*.jar" />
-					<exclude name="hadoop*.jar" />
-					<include name="${hadoop.jarfile}" />
-				</fileset>
-			</classpath>
-	                <formatter type="${test.junit.output.format}" />
-			<batchtest fork="yes" todir="test/reports">
-				<fileset dir="test">
-					<include name="**/*Test*.java" />
-					<exclude name="**/TestLargeFile.java" />
+    <!-- Load all the default properties, and any the user wants    -->
+    <!-- to contribute (without having to type -D or edit this file) -->
+    <property file="${user.home}/build.properties" />
+    <property file="${basedir}/build.properties" />
+
+    <!-- name and version properties -->
+    <property name="name" value="pig" />
+    <property name="Name" value="Pig" />
+    <property name="version" value="0.1.0-dev" />
+    <property name="final.name" value="${name}-${version}" />
+
+    <!-- source properties -->
+    <property name="lib.dir" value="${basedir}/lib/" />
+    <property name="src.dir" value="${basedir}/src/" />
+    <property name="src.lib.dir" value="${basedir}/lib-src/" />
+    <property name="src.gen.dir" value="${basedir}/src-gen/" />
+
+    <!-- javacc properties -->
+    <property name="src.gen.query.parser.dir" value="${src.gen.dir}/org/apache/pig/impl/logicalLayer/parser" />
+    <property name="src.gen.script.parser.dir" value="${src.gen.dir}/org/apache/pig/tools/pigscript/parser" />
+
+    <property name="javacc.home" value="${basedir}/lib" />
+
+    <!-- javac properties -->
+    <property name="javac.debug" value="on" />
+    <property name="javac.optimize" value="on" />
+    <property name="javac.deprecation" value="off" />
+    <property name="javac.version" value="1.5" />
+    <property name="javac.args" value="" />
+    <!-- TODO we should use warnings...   <property name="javac.args.warnings" value="-Xlint:unchecked" /> -->
+    <property name="javac.args.warnings" value="" />
+    
+    <!-- build properties -->
+    <property name="build.dir" value="${basedir}/build" />
+    <property name="build.classes" value="${build.dir}/classes" />
+    <property name="build.docs" value="${build.dir}/docs" />
+    <property name="build.javadoc" value="${build.docs}/api" />
+    <property name="dist.dir" value="${build.dir}/${final.name}" />
+    <property name="build.encoding" value="ISO-8859-1" />
+    <!-- TODO with only one version of hadoop in the lib folder we do not need that anymore -->
+    <property name="hadoop.jarfile" value="hadoop15.jar" />
+
+    <!-- jar names. TODO we might want to use the svn revision number in the name in case it is a dev version -->
+    <property name="output.jarfile" value="${build.dir}/${final.name}.jar" />
+    <property name="output.jarfile.core" value="${build.dir}/${final.name}-core.jar" />
+    <!-- Maintain old pig.jar in top level directory. -->
+    <property name="output.jarfile.backcompat" value="${basedir}/${name}.jar" />
+
+    <!-- test properties -->
+    <property name="test.src.dir" value="${basedir}/test" />
+    <property name="test.build.dir" value="${build.dir}/test" />
+    <property name="test.build.classes" value="${test.build.dir}/classes" />
+    <property name="test.log.dir" value="${test.build.dir}/logs" />
+    <property name="test.timeout" value="900000" />
+    <property name="test.junit.output.format" value="plain" />
+
+    <!-- test configuration, use ${user.home}/build.properties to configure values  -->
+    <property name="ssh.gateway" value="" />
+    <property name="hod.server" value="" />
+    <property name="junit.hadoop.conf" value="" />
+    <property name="test.log.dir" value="${basedir}/test/logs"/>
+    <property name="junit.hadoop.conf" value="${user.home}/pigtest/conf/"/>
+
+    <!-- ====================================================== -->
+    <!-- Stuff needed by all targets                            -->
+    <!-- ====================================================== -->
+    <!-- setup the classpath -->
+    <path id="classpath">
+        <fileset file="${lib.dir}/hadoop15.jar" />
+        <fileset file="${lib.dir}/javacc.jar" />
+        <fileset file="${lib.dir}/jsch-0.1.33.jar" />
+        <fileset file="${lib.dir}/junit-4.1.jar" />
+    </path>
+    
+    <path id="test.classpath">
+      <pathelement location="${build.classes}"/>
+      <pathelement location="${test.src.dir}"/>
+      <path refid="classpath"/>
+    </path>
+
+        
+    <target name="init">
+        <mkdir dir="${src.gen.query.parser.dir}" />
+        <mkdir dir="${src.gen.script.parser.dir}" />
+        <mkdir dir="${dist.dir}" />
+        <mkdir dir="${build.classes}" />
+        <mkdir dir="${test.build.classes}" />
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Clean.  Delete the build files, and their directories              -->
+    <!-- ================================================================== -->
+    <target name="clean">
+        <delete dir="${src.gen.dir}" />
+        <delete dir="${build.dir}" />
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Java Compiler Compiler, generate Parsers                           -->
+    <!-- ================================================================== -->
+    <target name="cc-compile" depends="init">
+        <jjtree target="${src.dir}/org/apache/pig/impl/logicalLayer/parser/QueryParser.jjt" outputdirectory="${src.gen.query.parser.dir}" javacchome="${javacc.home}" />
+        <javacc target="${src.gen.query.parser.dir}/QueryParser.jj" outputdirectory="${src.gen.query.parser.dir}" javacchome="${javacc.home}" />
+        <javacc target="${src.dir}/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj" outputdirectory="${src.gen.script.parser.dir}" javacchome="${javacc.home}" />
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Build sources                                          -->
+    <!-- ================================================================== -->
+    <target name="compile" depends="init, cc-compile">
+        <echo>*** Building Main Sources ***</echo>
+        <antcall target="compile-sources">
+            <param name="sources" value="${src.dir};${src.gen.dir};${src.lib.dir}/shock;${src.lib.dir}/bzip2" />
+            <param name="dist" value="${build.classes}" />
+            <param name="cp" value="classpath" />
+        </antcall>
+    </target>
+
+    <target name="compile-test" depends="compile">
+       
+       
+        
+        <echo>*** Building Test Sources ***</echo>
+        <antcall target="compile-sources">
+            <param name="sources" value="${test.src.dir}" />
+            <param name="dist" value="${test.build.classes}" />
+            <param name="cp" value="test.classpath" />
+        </antcall>
+    </target>
+
+    <target name="compile-sources">
+        <javac encoding="${build.encoding}" srcdir="${sources}" includes="**/*.java" destdir="${dist}" debug="${javac.debug}" optimize="${javac.optimize}" target="${javac.version}" source="${javac.version}" deprecation="${javac.deprecation}">
+            <compilerarg line="${javac.args} ${javac.args.warnings}" />
+            <classpath refid="${cp}" />
+        </javac>
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Documentation                                                      -->
+    <!-- ================================================================== -->
+    <target name="javadoc" depends="init">
+        <mkdir dir="${build.javadoc}" />
+        <javadoc overview="${src.dir}/overview.html" packagenames="org.apache.pig.*" destdir="${build.javadoc}" author="true" version="true" use="true" windowtitle="${Name} ${version} API" doctitle="${Name} ${version} API" bottom="Copyright &amp;copy; ${year} The Apache Software Foundation">
+            <packageset dir="${src.dir}" />
+            <link href="${javadoc.link.java}" />
+            <classpath>
+                <path refid="classpath" />
+                <pathelement path="${java.class.path}" />
+            </classpath>
+            <group title="pig" packages="org.apache.*" />
+        </javadoc>
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- @deprecated, Documentation                                         -->
+    <!-- ================================================================== -->
+    <target name="doc" depends="javadoc">
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Make pig.jar                                                       -->
+    <!-- ================================================================== -->
+    <!-- TODO we should also exclude tests here...                          -->
+    <!-- ================================================================== -->
+    <target name="jar" depends="compile">
+        <jar jarfile="${output.jarfile.core}" basedir="${build.classes}">
+            <manifest>
+                <attribute name="Main-Class" value="org.apache.pig.Main" />
+                <section name="org/apache/pig">
+                    <attribute name="Implementation-Title" value="Pig" />
+                    <attribute name="Implementation-Version" value="${version}" />
+                    <attribute name="Implementation-Vendor" value="Apache" />
+                </section>
+            </manifest>
+        </jar>
+        <!-- @deprecated -->
+        <jar jarfile="${output.jarfile}" basedir="${build.classes}">
+            <manifest>
+                <attribute name="Main-Class" value="org.apache.pig.Main" />
+                <section name="org/apache/pig">
+                    <attribute name="Implementation-Title" value="Pig" />
+                    <attribute name="Implementation-Version" value="${version}" />
+                    <attribute name="Implementation-Vendor" value="Apache" />
+                </section>
+            </manifest>
+            <zipfileset src="${lib.dir}/junit-4.1.jar" />
+            <zipfileset src="${lib.dir}/${hadoop.jarfile}" />
+            <zipfileset src="${lib.dir}/jsch-0.1.33.jar" />
+        </jar>
+        <copy file="${output.jarfile}" tofile="${output.jarfile.backcompat}"/>
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Run unit tests                                                     -->
+    <!-- ================================================================== -->
+    <target name="test" depends="compile-test,jar">
+        <delete dir="${test.log.dir}"/>
+        <mkdir dir="${test.log.dir}"/>
+        <junit showoutput="${test.output}" printsummary="yes" haltonfailure="no" fork="yes" maxmemory="256m" dir="${basedir}" timeout="${test.timeout}" errorProperty="tests.failed" failureProperty="tests.failed">
+            <sysproperty key="ssh.gateway" value="${ssh.gateway}" />
+            <sysproperty key="hod.server" value="${hod.server}" />
+            <!-- <sysproperty key="hod.command" value="${hod.command}"/>
+            			<sysproperty key="hod.param" value="${hod.param}"/> -->
+            <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
+            <classpath>
+                <pathelement location="${output.jarfile}" />
+                <pathelement location="${test.build.classes}" />
+                <pathelement location="${junit.hadoop.conf}" />
+                <path refid="classpath"/>
+            </classpath>
+            <formatter type="${test.junit.output.format}" />
+            <batchtest fork="yes" todir="${test.log.dir}">
+                <fileset dir="test">
+                    <include name="**/*Test*.java" />
+                    <exclude name="**/TestLargeFile.java" />
                     <exclude name="**/TestOrderBy.java" />
                     <exclude name="**/TestPi.java" />
+                    <exclude name="**/nightly/**" />
+                </fileset>
+            </batchtest>
+        </junit>
+        <fail if="tests.failed">Tests failed!</fail>
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- D I S T R I B U T I O N                                            -->
+    <!-- ================================================================== -->
+    <target name="package" depends="jar, javadoc">
+        <mkdir dir="${dist.dir}" />
+        <mkdir dir="${dist.dir}/lib" />
+        <mkdir dir="${dist.dir}/scripts" />
+        <mkdir dir="${dist.dir}/docs" />
+        <mkdir dir="${dist.dir}/docs/api" />
+
+        <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
+            <fileset dir="${lib.dir}">
+            </fileset>
+        </copy>
 
-					<exclude name="**/nightly/**" />
-				</fileset>
-			</batchtest>
-		</junit>
-		<fail if="tests.failed">Tests failed!</fail>
-	</target>
-
-	<target name="doc">
-		<mkdir dir="doc"/>
-		<echo>*** Building Java Docs ***</echo>
-		<javadoc packagenames="org.apache.pig.*" sourcepath="${src.dir}" destdir="${doc.dir}">
-                        <classpath>
-                                <path refid="classpath"/>
-                                <pathelement path="${java.class.path}"/>
-                        </classpath>
-                </javadoc>
-	</target>
+        <copy file="${output.jarfile.core}" todir="${dist.dir}" />
+
+        <copy todir="${dist.dir}/scripts">
+            <fileset dir="scripts" />
+        </copy>
+
+        <copy todir="${dist.dir}/docs">
+            <fileset dir="${build.docs}" />
+        </copy>
+
+        <copy todir="${dist.dir}/src" includeEmptyDirs="true">
+            <fileset dir="${src.dir}" />
+            <fileset dir="${src.gen.dir}" />
+        </copy>
+
+        <copy todir="${dist.dir}/" file="build.xml" />
+
+        <copy todir="${dist.dir}">
+            <fileset dir=".">
+                <include name="*.txt" />
+            </fileset>
+        </copy>
+
+        <chmod perm="ugo+x" type="file">
+            <fileset dir="${dist.dir}/scripts" />
+        </chmod>
+
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Make release tarball                                               -->
+    <!-- ================================================================== -->
+    <target name="tar" depends="package">
+        <tar compression="gzip" longfile="gnu" destfile="${build.dir}/${final.name}.tar.gz">
+            <tarfileset dir="${dist.dir}" mode="664">
+                <exclude name="scripts/*" />
+                <include name="**" />
+            </tarfileset>
+            <tarfileset dir="${dist.dir}" mode="755">
+                <include name="scripts/*" />
+            </tarfileset>
+        </tar>
+    </target>
 
 </project>

Modified: incubator/pig/branches/types/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java (original)
+++ incubator/pig/branches/types/lib-src/bzip2/org/apache/tools/bzip2r/CBZip2OutputStream.java Tue Feb 26 16:51:49 2008
@@ -63,6 +63,9 @@
 import java.io.OutputStream;
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 /**
  * An output stream that compresses into the BZip2 format (without the file
  * header chars) into another stream.
@@ -72,6 +75,9 @@
  * TODO:    Update to BZip2 1.0.1
  */
 public class CBZip2OutputStream extends OutputStream implements BZip2Constants {
+
+    private final static Log log = LogFactory.getLog(CBZip2OutputStream.class);
+    
     protected static final int SETMASK = (1 << 21);
     protected static final int CLEARMASK = (~SETMASK);
     protected static final int GREATER_ICOST = 15;
@@ -90,7 +96,7 @@
     protected static final int QSORT_STACK_SIZE = 1000;
 
     private static void panic() {
-        System.out.println("panic");
+        log.info("panic");
         //throw new CError();
     }
 

Modified: incubator/pig/branches/types/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java (original)
+++ incubator/pig/branches/types/lib-src/shock/org/apache/pig/shock/SSHSocketImplFactory.java Tue Feb 26 16:51:49 2008
@@ -38,6 +38,9 @@
 import java.util.HashMap;
 import java.util.Properties;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 import com.jcraft.jsch.ChannelDirectTCPIP;
 import com.jcraft.jsch.ChannelExec;
 import com.jcraft.jsch.JSch;
@@ -67,6 +70,9 @@
  *
  */
 public class SSHSocketImplFactory implements SocketImplFactory, Logger {
+    
+    private static final Log log = LogFactory.getLog(SSHSocketImplFactory.class);
+
 	Session session;
 
 	public static SSHSocketImplFactory getFactory() throws JSchException, IOException {
@@ -136,7 +142,7 @@
 	}
 
 	public void log(int arg0, String arg1) {
-		System.err.println(arg0 + ": " + arg1);
+		log.error(arg0 + ": " + arg1);
 	}
 
 	class SSHProcess extends Process {
@@ -226,6 +232,8 @@
  */
 class SSHSocketFactory implements SocketFactory {
 
+    private final static Log log = LogFactory.getLog(SSHSocketFactory.class);
+    
 	public Socket createSocket(String host, int port) throws IOException,
 			UnknownHostException {
 		String socksHost = System.getProperty("socksProxyHost");
@@ -237,7 +245,7 @@
 			s = new Socket(proxy);
 			s.connect(addr);
 		} else {
-			System.err.println(addr);
+			log.error(addr);
 			SocketChannel sc = SocketChannel.open(addr);
 			s = sc.socket();
 		}
@@ -302,6 +310,9 @@
  * things to SSH.
  */
 class SSHSocketImpl extends SocketImpl {
+    
+    private static final Log log = LogFactory.getLog(SSHSocketImpl.class);
+
 	Session session;
 
 	ChannelDirectTCPIP channel;
@@ -384,13 +395,13 @@
 							(PipedInputStream) is));
 			channel.connect();
 			if (!channel.isConnected()) {
-				System.err.println("Not connected");
+				log.error("Not connected");
 			}
 			if (channel.isEOF()) {
-				System.err.println("EOF");
+				log.error("EOF");
 			}
 		} catch (JSchException e) {
-			e.printStackTrace();
+			log.error(e);
 			IOException newE = new IOException(e.getMessage());
 			newE.setStackTrace(e.getStackTrace());
 			throw newE;
@@ -445,34 +456,34 @@
 					@Override
 					public void run() {
 						try {
-							System.err.println("Starting " + this);
+							log.error("Starting " + this);
 							connectTest("www.yahoo.com");
-							System.err.println("Finished " + this);
+							log.error("Finished " + this);
 						} catch (Exception e) {
-							e.printStackTrace();
+							log.error(e);
 						}
 					}
 				}.start();
 			}
 			Thread.sleep(1000000);
 			connectTest("www.news.com");
-			System.out.println("******** Starting PART II");
+			log.info("******** Starting PART II");
 			for (int i = 0; i < 10; i++) {
 				new Thread() {
 					@Override
 					public void run() {
 						try {
-							System.err.println("Starting " + this);
+							log.error("Starting " + this);
 							connectTest("www.flickr.com");
-							System.err.println("Finished " + this);
+							log.error("Finished " + this);
 						} catch (Exception e) {
-							e.printStackTrace();
+							log.error(e);
 						}
 					}
 				}.start();
 			}
 		} catch (Exception e) {
-			e.printStackTrace();
+			log.error(e);
 		}
 	}
 

Modified: incubator/pig/branches/types/lib/hadoop15.jar
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/lib/hadoop15.jar?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
Binary files - no diff available.

Modified: incubator/pig/branches/types/scripts/pig.pl
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/scripts/pig.pl?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/scripts/pig.pl (original)
+++ incubator/pig/branches/types/scripts/pig.pl Tue Feb 26 16:51:49 2008
@@ -3,6 +3,7 @@
 use strict;
 use File::Find;
 use English;
+use File::Basename;
 
 sub processClasspath();
 sub getJar($);
@@ -21,14 +22,24 @@
 our $ROOT = (defined($ENV{'ROOT'}) ? $ENV{'ROOT'} : "/home/y");
 my ($pigJarRoot, $hodRoot, $defaultCluster);
 
-open(CFG, "< $ROOT/conf/pigclient.conf") or
-	die "Can't open $ROOT/conf/pigclient.conf, $ERRNO\n";
-
-my $cfgContents;
-$cfgContents .= $_ while (<CFG>);
-close(CFG);
+if ( -e "$ROOT/conf/pigclient.conf") {
+	open(CFG, "< $ROOT/conf/pigclient.conf") or
+		die "Can't open $ROOT/conf/pigclient.conf, $ERRNO\n";
+	my $cfgContents;
+	$cfgContents .= $_ while (<CFG>);
+	close(CFG);
+	eval("$cfgContents");
+} else {# use defaults for what can be set in pigclient.conf
+	#$pigJarRoot - assume the directory above this script
+	$pigJarRoot = dirname $0;
+	$pigJarRoot .= '/../';
+	#$defaultCluster - defined so processClasspath() will hunt around
+	#for a jar file
+	$defaultCluster = '';
+	#$hodRoot - leave this undefined - we assume that hod is not required
+	#by default
+}
 
-eval("$cfgContents");
 
 for (my $i = 0; $i < @ARGV; ) {
 	if ($ARGV[$i] eq "-cp" || $ARGV[$i] eq "-classpath") {
@@ -88,8 +99,10 @@
 	# location.
 	if (-e "/usr/releng/tools/java/current/bin/java") {
 		$java = '/usr/releng/tools/java/current/bin/java';
+	} elsif (exists $ENV{JAVA_HOME}) {
+		$java = $ENV{JAVA_HOME}.'/bin/java';
 	} else {
-		die "I can't find java, please include it in your PATH.\n";
+		die "I can't find java, please include it in your PATH or set JAVA_HOME\n";
 	}
 }
 
@@ -153,7 +166,9 @@
 			{
 				$classpath .= ":";
 			}
-			$classpath .= getJar("pig.jar") .":" . getConfigDir(); 
+			my $pigJar = getJar("pig.jar");
+			$pigJar = "$pigJarRoot/pig.jar" unless -e $pigJar;
+			$classpath .= $pigJar .":" . getConfigDir(); 
 			
 		}
 	}

Modified: incubator/pig/branches/types/src/org/apache/pig/Algebraic.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/Algebraic.java?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/Algebraic.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/Algebraic.java Tue Feb 26 16:51:49 2008
@@ -33,17 +33,17 @@
  *
  */
 public interface Algebraic{
-	
-	/**
-	 * 
-	 * @return A string to instatiate f_init. f_init should be an eval func 
-	 */
-	public String getInitial();
+    
+    /**
+     * 
+     * @return A string to instantiate f_init. f_init should be an eval func
+     */
+    public String getInitial();
 
-	/**
-	 * 
-	 * @return A string to instantiate f_intermed. f_intermed should be an eval func
-	 */
+    /**
+     * 
+     * @return A string to instantiate f_intermed. f_intermed should be an eval func
+     */
     public String getIntermed();
 
     /**

Modified: incubator/pig/branches/types/src/org/apache/pig/EvalFunc.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/EvalFunc.java?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/EvalFunc.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/EvalFunc.java Tue Feb 26 16:51:49 2008
@@ -1,3 +1,4 @@
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -43,74 +44,67 @@
  *
  */
 public abstract class EvalFunc<T>  {
-	
-	protected Type returnType;
-	
-	public EvalFunc(){
-		
-		//Figure out what the return type is by following the object hierarchy upto the EvalFunc
-		
-		Class<?> superClass = getClass();
-		Type superType = getClass();
-		
-		while (!superClass.isAssignableFrom(EvalFunc.class)){
-			superType = superClass.getGenericSuperclass();
-			superClass = superClass.getSuperclass();
-		}
-		String errMsg = getClass() + "extends the raw type EvalFunc. It should extend the parameterized type EvalFunc<T> instead.";
-		
-		if (!(superType instanceof ParameterizedType))
-			throw new RuntimeException(errMsg);
-		
-		Type[] parameters  = ((ParameterizedType)superType).getActualTypeArguments();
-		
-		if (parameters.length != 1)
-				throw new RuntimeException(errMsg);
-		
-		returnType = parameters[0];
-		
-        /*
-		if (returnType == Datum.class){
-			throw new RuntimeException("Eval function must return a specific type of Datum");
-		}
-        */
-		
-		
-		//Type check the initial, intermediate, and final functions
-		if (this instanceof Algebraic){
-			Algebraic a = (Algebraic)this;
-			
-			errMsg = "function of " + getClass().getName() + " is not of the expected type.";
-			if (getReturnTypeFromSpec(a.getInitial()) != Tuple.class)
-				throw new RuntimeException("Initial " + errMsg);
-			if (getReturnTypeFromSpec(a.getIntermed()) != Tuple.class)
-					throw new RuntimeException("Intermediate " + errMsg);
-			if (getReturnTypeFromSpec(a.getFinal()) != returnType)
-					throw new RuntimeException("Final " + errMsg);
-		}
-		
-	}
-	
-
-	private Type getReturnTypeFromSpec(String funcSpec){
-		try{
-			return ((EvalFunc)PigContext.instantiateFuncFromSpec(funcSpec)).getReturnType();
-		}catch (IOException e){
-			throw new RuntimeException(e);
-		}catch (ClassCastException e){
-			throw new RuntimeException(funcSpec + " does not specify an eval func");
-		}
-	}
-	
-	public Type getReturnType(){
-		return returnType;
-	}
-		
+    
+    protected Type returnType;
+    
+    public EvalFunc(){
+        
+        //Figure out what the return type is by following the object hierarchy up to EvalFunc
+        
+        Class<?> superClass = getClass();
+        Type superType = getClass();
+        
+        while (!superClass.isAssignableFrom(EvalFunc.class)){
+            superType = superClass.getGenericSuperclass();
+            superClass = superClass.getSuperclass();
+        }
+        String errMsg = getClass() + " extends the raw type EvalFunc. It should extend the parameterized type EvalFunc<T> instead.";
+        
+        if (!(superType instanceof ParameterizedType))
+            throw new RuntimeException(errMsg);
+        
+        Type[] parameters  = ((ParameterizedType)superType).getActualTypeArguments();
+        
+        if (parameters.length != 1)
+                throw new RuntimeException(errMsg);
+        
+        returnType = parameters[0];
+        
+        
+        
+        //Type check the initial, intermediate, and final functions
+        if (this instanceof Algebraic){
+            Algebraic a = (Algebraic)this;
+            
+            errMsg = "function of " + getClass().getName() + " is not of the expected type.";
+            if (getReturnTypeFromSpec(a.getInitial()) != Tuple.class)
+                throw new RuntimeException("Initial " + errMsg);
+            if (getReturnTypeFromSpec(a.getIntermed()) != Tuple.class)
+                    throw new RuntimeException("Intermediate " + errMsg);
+            if (getReturnTypeFromSpec(a.getFinal()) != returnType)
+                    throw new RuntimeException("Final " + errMsg);
+        }
+        
+    }
+    
+
+    private Type getReturnTypeFromSpec(String funcSpec){
+        try{
+            return ((EvalFunc)PigContext.instantiateFuncFromSpec(funcSpec)).getReturnType();
+        }catch (ClassCastException e){
+            throw new RuntimeException(funcSpec + " does not specify an eval func", e);
+        }
+    }
+    
+    public Type getReturnType(){
+        return returnType;
+    }
+        
     // report that progress is being made (otherwise hadoop times out after 600 seconds working on one outer tuple)
     protected void progress() { 
         //This part appears to be unused and is causing problems due to changing hadoop signature
-    	/*
-    	if (PigMapReduce.reporter != null) {
+        /*
+        if (PigMapReduce.reporter != null) {
             try {
                 PigMapReduce.reporter.progress();
             } catch (IOException ignored) {
@@ -155,6 +149,6 @@
      * @return
      */
     public boolean isAsynchronous(){
-    	return false;
+        return false;
     }
 }

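The EvalFunc constructor above recovers the concrete type bound to EvalFunc<T> by walking getGenericSuperclass() up the class hierarchy until it reaches EvalFunc and then reading the actual type argument. A standalone sketch of the same reflection technique, outside Pig (GenericTypeDemo, Base and Child are hypothetical names used only for illustration):

    import java.lang.reflect.ParameterizedType;
    import java.lang.reflect.Type;

    public class GenericTypeDemo {
        static abstract class Base<T> {
            final Type returnType;
            Base() {
                Class<?> c = getClass();
                Type t = getClass();
                // Climb the hierarchy until we reach Base itself, keeping the
                // generic view of the superclass we just stepped over.
                while (!c.isAssignableFrom(Base.class)) {
                    t = c.getGenericSuperclass();
                    c = c.getSuperclass();
                }
                if (!(t instanceof ParameterizedType))
                    throw new RuntimeException(getClass() + " extends the raw type Base");
                returnType = ((ParameterizedType) t).getActualTypeArguments()[0];
            }
        }

        static class Child extends Base<Integer> {}

        public static void main(String[] args) {
            // Prints "class java.lang.Integer"
            System.out.println(new Child().returnType);
        }
    }
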
Modified: incubator/pig/branches/types/src/org/apache/pig/FilterFunc.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/FilterFunc.java?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/FilterFunc.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/FilterFunc.java Tue Feb 26 16:51:49 2008
@@ -24,7 +24,7 @@
 
 public abstract class FilterFunc {
     
-				/**
+                /**
      * This callback method must be implemented by all subclasses. This
      * is the method that will be invoked on every Tuple of a given dataset.
      * Since the dataset may be divided up in a variety of ways the programmer

Modified: incubator/pig/branches/types/src/org/apache/pig/LoadFunc.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/LoadFunc.java?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/LoadFunc.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/LoadFunc.java Tue Feb 26 16:51:49 2008
@@ -24,42 +24,42 @@
 
 
 public interface LoadFunc {
-	/**
-	 * This interface is used to implement functions to parse records
-	 * from a dataset.
-	 * 
-	 * @author database-systems@research.yahoo
-	 *
-	 */
-	/**
-	 * Specifies a portion of an InputStream to read tuples. Because the
-	 * starting and ending offsets may not be on record boundaries it is up to
-	 * the implementor to deal with figuring out the actual starting and ending
-	 * offsets in such a way that an arbitrarily sliced up file will be processed
-	 * in its entirety.
-	 * <p>
-	 * A common way of handling slices in the middle of records is to start at
-	 * the given offset and, if the offset is not zero, skip to the end of the
-	 * first record (which may be a partial record) before reading tuples.
-	 * Reading continues until a tuple has been read that ends at an offset past
-	 * the ending offset.
-	 * <p>
-	 * <b>The load function should not do any buffering on the input stream</b>. Buffering will
-	 * cause the offsets returned by is.getPos() to be unreliable.
-	 *  
-	 * @param fileName the name of the file to be read
-	 * @param is the stream representing the file to be processed, and which can also provide its position.
-	 * @param offset the offset to start reading tuples.
-	 * @param end the ending offset for reading.
-	 * @throws IOException
-	 */
-	public abstract void bindTo(String fileName, BufferedPositionedInputStream is, long offset, long end) throws IOException;
-	/**
-	 * Retrieves the next tuple to be processed.
-	 * @return the next tuple to be processed or null if there are no more tuples
-	 * to be processed.
-	 * @throws IOException
-	 */
-	public abstract Tuple getNext() throws IOException;
-	
+    /**
+     * This interface is used to implement functions to parse records
+     * from a dataset.
+     * 
+     * @author database-systems@research.yahoo
+     *
+     */
+    /**
+     * Specifies a portion of an InputStream to read tuples. Because the
+     * starting and ending offsets may not be on record boundaries it is up to
+     * the implementor to deal with figuring out the actual starting and ending
+     * offsets in such a way that an arbitrarily sliced up file will be processed
+     * in its entirety.
+     * <p>
+     * A common way of handling slices in the middle of records is to start at
+     * the given offset and, if the offset is not zero, skip to the end of the
+     * first record (which may be a partial record) before reading tuples.
+     * Reading continues until a tuple has been read that ends at an offset past
+     * the ending offset.
+     * <p>
+     * <b>The load function should not do any buffering on the input stream</b>. Buffering will
+     * cause the offsets returned by is.getPos() to be unreliable.
+     *  
+     * @param fileName the name of the file to be read
+     * @param is the stream representing the file to be processed, and which can also provide its position.
+     * @param offset the offset to start reading tuples.
+     * @param end the ending offset for reading.
+     * @throws IOException
+     */
+    public abstract void bindTo(String fileName, BufferedPositionedInputStream is, long offset, long end) throws IOException;
+    /**
+     * Retrieves the next tuple to be processed.
+     * @return the next tuple to be processed or null if there are no more tuples
+     * to be processed.
+     * @throws IOException
+     */
+    public abstract Tuple getNext() throws IOException;
+    
 }
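
The bindTo() javadoc above describes the usual slicing convention: a loader that starts mid-file skips the first partial record, then keeps reading until it has consumed the record that straddles the slice's end offset. A rough sketch of a line-oriented loader following that convention is shown below; the class name, the parseLine() helper, and the import path for BufferedPositionedInputStream are assumptions for illustration, and only read() plus the getPos() method mentioned in the javadoc are relied on:

    // Hypothetical loader sketch, not part of this commit.
    import java.io.IOException;

    import org.apache.pig.LoadFunc;
    import org.apache.pig.data.Tuple;
    import org.apache.pig.impl.io.BufferedPositionedInputStream;

    public abstract class LineLoaderSketch implements LoadFunc {
        private BufferedPositionedInputStream in;
        private long end;

        public void bindTo(String fileName, BufferedPositionedInputStream is,
                           long offset, long end) throws IOException {
            this.in = is;
            this.end = end;
            // A slice that starts mid-file begins with a (possibly partial)
            // record belonging to the previous slice, so skip past it.
            if (offset != 0) {
                readLine();
            }
        }

        public Tuple getNext() throws IOException {
            // Stop once the stream position has moved past the slice's end
            // offset; the record that straddled the boundary has already
            // been returned by an earlier call.
            if (in == null || in.getPos() > end) {
                return null;
            }
            String line = readLine();
            return line == null ? null : parseLine(line);
        }

        // Reads bytes up to the next '\n' directly from the stream, with no
        // extra buffering, so getPos() stays accurate as the javadoc requires.
        private String readLine() throws IOException {
            StringBuffer sb = new StringBuffer();
            int c;
            while ((c = in.read()) != -1 && c != '\n') {
                sb.append((char) c);
            }
            return (c == -1 && sb.length() == 0) ? null : sb.toString();
        }

        // Hypothetical helper: turn one text line into a Tuple; the Tuple
        // construction API is left to the concrete loader.
        protected abstract Tuple parseLine(String line) throws IOException;
    }

A concrete subclass would implement parseLine() with whatever Tuple construction this branch provides.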

Modified: incubator/pig/branches/types/src/org/apache/pig/Main.java
URL: http://svn.apache.org/viewvc/incubator/pig/branches/types/src/org/apache/pig/Main.java?rev=631443&r1=631442&r2=631443&view=diff
==============================================================================
--- incubator/pig/branches/types/src/org/apache/pig/Main.java (original)
+++ incubator/pig/branches/types/src/org/apache/pig/Main.java Tue Feb 26 16:51:49 2008
@@ -23,7 +23,8 @@
 import java.util.jar.*;
 import java.text.ParseException;
 
-import org.apache.hadoop.util.HadoopExe;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.Logger;
@@ -33,7 +34,6 @@
 import org.apache.log4j.PropertyConfigurator;
 import org.apache.pig.PigServer.ExecType;
 import org.apache.pig.impl.PigContext;
-import org.apache.pig.impl.util.PigLogger;
 import org.apache.pig.impl.logicalLayer.LogicalPlanBuilder;
 import org.apache.pig.tools.cmdline.CmdLineParser;
 import org.apache.pig.tools.grunt.Grunt;
@@ -43,9 +43,10 @@
 public class Main
 {
 
+    private final static Log log = LogFactory.getLog(Main.class);
     
 private enum ExecMode {STRING, FILE, SHELL, UNKNOWN};
-	            
+                
 /**
  * The Main-Class for the Pig Jar that will provide a shell and setup a classpath appropriate
  * for executing Jar files.
@@ -58,37 +59,37 @@
  */
 public static void main(String args[])
 {
-	int rc = 1;
-	PigContext pigContext = new PigContext();
+    int rc = 1;
+    PigContext pigContext = new PigContext();
 
-	try {
-		BufferedReader in = null;
-		ExecMode mode = ExecMode.UNKNOWN;
-		int port = 0;
-		String file = null;
-		Level logLevel = Level.INFO;
+    try {
+        BufferedReader in = null;
+        ExecMode mode = ExecMode.UNKNOWN;
+        int port = 0;
+        String file = null;
+        Level logLevel = Level.INFO;
         boolean brief = false;
         String log4jconf = null;
-		boolean verbose = false;
+        boolean verbose = false;
 
-		CmdLineParser opts = new CmdLineParser(args);
-		// Don't use -l, --latest, -c, --cluster, -cp, -classpath, -D as these
-		// are masked by the startup perl script.
+        CmdLineParser opts = new CmdLineParser(args);
+        // Don't use -l, --latest, -c, --cluster, -cp, -classpath, -D as these
+        // are masked by the startup perl script.
         opts.registerOpt('4', "log4jconf", CmdLineParser.ValueExpected.REQUIRED);
         opts.registerOpt('b', "brief", CmdLineParser.ValueExpected.NOT_ACCEPTED);
-		opts.registerOpt('c', "cluster", CmdLineParser.ValueExpected.REQUIRED);
-		opts.registerOpt('d', "debug", CmdLineParser.ValueExpected.REQUIRED);
-		opts.registerOpt('e', "execute", CmdLineParser.ValueExpected.NOT_ACCEPTED);
-		opts.registerOpt('f', "file", CmdLineParser.ValueExpected.REQUIRED);
-		opts.registerOpt('h', "help", CmdLineParser.ValueExpected.NOT_ACCEPTED);
-		opts.registerOpt('o', "hod", CmdLineParser.ValueExpected.NOT_ACCEPTED);
-		opts.registerOpt('j', "jar", CmdLineParser.ValueExpected.REQUIRED);
-		opts.registerOpt('v', "verbose", CmdLineParser.ValueExpected.NOT_ACCEPTED);
-		opts.registerOpt('x', "exectype", CmdLineParser.ValueExpected.REQUIRED);
-
-		char opt;
-		while ((opt = opts.getNextOpt()) != CmdLineParser.EndOfOpts) {
-			switch (opt) {
+        opts.registerOpt('c', "cluster", CmdLineParser.ValueExpected.REQUIRED);
+        opts.registerOpt('d', "debug", CmdLineParser.ValueExpected.REQUIRED);
+        opts.registerOpt('e', "execute", CmdLineParser.ValueExpected.NOT_ACCEPTED);
+        opts.registerOpt('f', "file", CmdLineParser.ValueExpected.REQUIRED);
+        opts.registerOpt('h', "help", CmdLineParser.ValueExpected.NOT_ACCEPTED);
+        opts.registerOpt('o', "hod", CmdLineParser.ValueExpected.NOT_ACCEPTED);
+        opts.registerOpt('j', "jar", CmdLineParser.ValueExpected.REQUIRED);
+        opts.registerOpt('v', "verbose", CmdLineParser.ValueExpected.NOT_ACCEPTED);
+        opts.registerOpt('x', "exectype", CmdLineParser.ValueExpected.REQUIRED);
+
+        char opt;
+        while ((opt = opts.getNextOpt()) != CmdLineParser.EndOfOpts) {
+            switch (opt) {
             case '4':
                 log4jconf = opts.getValStr();
                 break;
@@ -97,95 +98,95 @@
                 brief = true;
                 break;
 
-			case 'c': {
-				// Needed away to specify the cluster to run the MR job on
-				// Bug 831708 - fixed
-			   	String cluster = opts.getValStr();
-			   	System.out.println("Changing MR cluster to " + cluster);
-			   	if(cluster.indexOf(':') < 0) {
-				   	cluster = cluster + ":50020";
-			   	}
-			   	pigContext.setJobtrackerLocation(cluster);
-				break;
-					  }
-
-			case 'd':
-				logLevel = Level.toLevel(opts.getValStr(), Level.INFO);
-				break;
-				
-			case 'e': 
-				mode = ExecMode.STRING;
-				break;
-
-			case 'f':
-				mode = ExecMode.FILE;
-				file = opts.getValStr();
-				break;
-
-			case 'h':
-				usage();
-				return;
-
-			case 'j': {
-			   	String splits[] = opts.getValStr().split(":", -1);
-			   	for (int i = 0; i < splits.length; i++) {
-				   	if (splits[i].length() > 0) {
-						pigContext.addJar(splits[i]);
-				   	}
-			   	}
-				break;
-					  }
-
-			case 'o': {
-			   	String gateway = System.getProperty("ssh.gateway");
-			   	if (gateway == null || gateway.length() == 0) {
-			   		System.setProperty("hod.server", "local");
-			   	} else {
-			   		System.setProperty("hod.server", System.getProperty("ssh.gateway"));
-			   	}
-				break;
-					  }
-
-			case 'v':
-				verbose = true;
-				break;
-
-			case 'x':
-				ExecType exectype;
-			   	try {
-				   	exectype = PigServer.parseExecType(opts.getValStr());
-			   	} catch (IOException e) {
-				   	throw new RuntimeException("ERROR: Unrecognized exectype.");
-			   	}
-			   	pigContext.setExecType(exectype);
-				break;
-
-			default: {
-				Character cc = new Character(opt);
-				throw new AssertionError("Unhandled option " + cc.toString());
-					 }
-			}
-		}
+            case 'c': {
+                // Needed a way to specify the cluster to run the MR job on
+                // Bug 831708 - fixed
+                   String cluster = opts.getValStr();
+                   System.out.println("Changing MR cluster to " + cluster);
+                   if(cluster.indexOf(':') < 0) {
+                       cluster = cluster + ":50020";
+                   }
+                   pigContext.setJobtrackerLocation(cluster);
+                break;
+                      }
+
+            case 'd':
+                logLevel = Level.toLevel(opts.getValStr(), Level.INFO);
+                break;
+                
+            case 'e': 
+                mode = ExecMode.STRING;
+                break;
+
+            case 'f':
+                mode = ExecMode.FILE;
+                file = opts.getValStr();
+                break;
+
+            case 'h':
+                usage();
+                return;
+
+            case 'j': {
+                   String splits[] = opts.getValStr().split(":", -1);
+                   for (int i = 0; i < splits.length; i++) {
+                       if (splits[i].length() > 0) {
+                        pigContext.addJar(splits[i]);
+                       }
+                   }
+                break;
+                      }
+
+            case 'o': {
+                   String gateway = System.getProperty("ssh.gateway");
+                   if (gateway == null || gateway.length() == 0) {
+                       System.setProperty("hod.server", "local");
+                   } else {
+                       System.setProperty("hod.server", System.getProperty("ssh.gateway"));
+                   }
+                break;
+                      }
+
+            case 'v':
+                verbose = true;
+                break;
+
+            case 'x':
+                ExecType exectype;
+                   try {
+                       exectype = PigServer.parseExecType(opts.getValStr());
+                   } catch (IOException e) {
+                       throw new RuntimeException("ERROR: Unrecognized exectype.", e);
+                   }
+                   pigContext.setExecType(exectype);
+                break;
+
+            default: {
+                Character cc = new Character(opt);
+                throw new AssertionError("Unhandled option " + cc.toString());
+                     }
+            }
+        }
 
-		LogicalPlanBuilder.classloader = pigContext.createCl(null);
+        LogicalPlanBuilder.classloader = pigContext.createCl(null);
 
-		if (log4jconf != null) {
+        if (log4jconf != null) {
             PropertyConfigurator.configure(log4jconf);
-		} else if (!brief) {
-		    // non-brief logging - timestamps
-		    Properties props = new Properties();
-		    props.setProperty("log4j.rootLogger", "INFO, PIGCONSOLE");
+        } else if (!brief) {
+            // non-brief logging - timestamps
+            Properties props = new Properties();
+            props.setProperty("log4j.rootLogger", "INFO, PIGCONSOLE");
             props.setProperty("log4j.appender.PIGCONSOLE",
                               "org.apache.log4j.ConsoleAppender");
             props.setProperty("log4j.appender.PIGCONSOLE.layout",
                               "org.apache.log4j.PatternLayout");
             props.setProperty("log4j.appender.PIGCONSOLE.layout.ConversionPattern",
                               "%d [%t] %-5p %c - %m%n");
-    	    PropertyConfigurator.configure(props);
+            PropertyConfigurator.configure(props);
             // Set the log level/threshold
             Logger.getRootLogger().setLevel(verbose ? Level.ALL : logLevel);
-		} else {
-		    // brief logging - no timestamps
+        } else {
+            // brief logging - no timestamps
             Properties props = new Properties();
             props.setProperty("log4j.rootLogger", "INFO, PIGCONSOLE");
             props.setProperty("log4j.appender.PIGCONSOLE",
@@ -197,113 +198,99 @@
             PropertyConfigurator.configure(props);
             // Set the log level/threshold
             Logger.getRootLogger().setLevel(verbose ? Level.ALL : logLevel);
-		}
+        }
+
+        // TODO Add a file appender for the logs
+        // TODO Need to create a property in the properties file for it.
 
-		// TODO Add a file appender for the logs
-		// TODO Need to create a property in the properties file for it.
+        // Don't forget to undo all this for the port option.
 
-		// Don't forget to undo all this for the port option.
+        // I might know what I want to do next, then again I might not.
+        Grunt grunt = null;
+        switch (mode) {
+        case FILE:
+            // Run, using the provided file as a pig file
+            in = new BufferedReader(new FileReader(file));
+            grunt = new Grunt(in, pigContext);
+            grunt.exec();
+            return;
+
+        case STRING: {
+            // Gather up all the remaining arguments into a string and pass them into
+            // grunt.
+            StringBuffer sb = new StringBuffer();
+            String remainders[] = opts.getRemainingArgs();
+            for (int i = 0; i < remainders.length; i++) {
+                if (i != 0) sb.append(' ');
+                sb.append(remainders[i]);
+            }
+            in = new BufferedReader(new StringReader(sb.toString()));
+            grunt = new Grunt(in, pigContext);
+            grunt.exec();
+            rc = 0;
+            return;
+                     }
+
+        default:
+            break;
+        }
 
-		// I might know what I want to do next, then again I might not.
-		Grunt grunt = null;
-		switch (mode) {
-		case FILE:
-			// Run, using the provided file as a pig file
-			in = new BufferedReader(new FileReader(file));
-			grunt = new Grunt(in, pigContext);
-			grunt.exec();
-			return;
-
-		case STRING: {
-			// Gather up all the remaining arguments into a string and pass them into
-			// grunt.
-			StringBuffer sb = new StringBuffer();
-			String remainders[] = opts.getRemainingArgs();
-			for (int i = 0; i < remainders.length; i++) {
-				if (i != 0) sb.append(' ');
-				sb.append(remainders[i]);
-			}
-			in = new BufferedReader(new StringReader(sb.toString()));
-			grunt = new Grunt(in, pigContext);
-			grunt.exec();
-			rc = 0;
-			return;
-					 }
-
-		default:
-			break;
-		}
-
-		// If we're here, we don't know yet what they want.  They may have just
-		// given us a jar to execute, they might have given us a pig script to
-		// execute, or they might have given us a dash (or nothing) which means to
-		// run grunt interactive.
-		String remainders[] = opts.getRemainingArgs();
-		if (remainders == null) {
-			// Interactive
-			mode = ExecMode.SHELL;
-			in = new BufferedReader(new InputStreamReader(System.in));
-			grunt = new Grunt(in, pigContext);
-			grunt.run();
-			rc = 0;
-			return;
-		} else {
-			// They have a pig script they want us to run.
-			if (remainders.length > 1) {
-			   	throw new RuntimeException("You can only run one pig script "
-					+ "at a time from the command line.");
-			}
-			mode = ExecMode.FILE;
-			in = new BufferedReader(new FileReader(remainders[0]));
-			grunt = new Grunt(in, pigContext);
-			grunt.exec();
-			rc = 0;
-			return;
-		}
-
-		// Per Utkarsh and Chris invocation of jar file via pig depricated.
-	} catch (ParseException e) {
-		usage();
-		rc = 1;
-	} catch (NumberFormatException e) {
-		usage();
-		rc = 1;
-	} catch (Throwable e) {
-		recursivePrintStackTrace(e);
-	} finally {
-		PerformanceTimerFactory.getPerfTimerFactory().dumpTimers();
-		System.exit(rc);
-	}
+        // If we're here, we don't know yet what they want.  They may have just
+        // given us a jar to execute, they might have given us a pig script to
+        // execute, or they might have given us a dash (or nothing) which means to
+        // run grunt interactive.
+        String remainders[] = opts.getRemainingArgs();
+        if (remainders == null) {
+            // Interactive
+            mode = ExecMode.SHELL;
+            in = new BufferedReader(new InputStreamReader(System.in));
+            grunt = new Grunt(in, pigContext);
+            grunt.run();
+            rc = 0;
+            return;
+        } else {
+            // They have a pig script they want us to run.
+            if (remainders.length > 1) {
+                   throw new RuntimeException("You can only run one pig script "
+                    + "at a time from the command line.");
+            }
+            mode = ExecMode.FILE;
+            in = new BufferedReader(new FileReader(remainders[0]));
+            grunt = new Grunt(in, pigContext);
+            grunt.exec();
+            rc = 0;
+            return;
+        }
+
+        // Per Utkarsh and Chris, invocation of jar files via pig is deprecated.
+    } catch (ParseException e) {
+        usage();
+        rc = 1;
+    } catch (NumberFormatException e) {
+        usage();
+        rc = 1;
+    } catch (Throwable e) {
+        log.error(e);
+    } finally {
+        PerformanceTimerFactory.getPerfTimerFactory().dumpTimers();
+        System.exit(rc);
+    }
 }
     
 public static void usage()
 {
-	System.err.println("USAGE: Pig [options] [-] : Run interactively in grunt shell.");
-	System.err.println("       Pig [options] -e[xecute] cmd [cmd ...] : Run cmd(s).");
-	System.err.println("       Pig [options] [-f[ile]] file : Run cmds found in file.");
-	System.err.println("  options include:");
-    System.err.println("    -4, -log4jconf log4j configuration file, overrides log conf");
-    System.err.println("    -b, -brief brief logging (no timestamps)");
-	System.err.println("    -c, -cluster clustername, kryptonite is default");
-	System.err.println("    -d, -debug debug level, INFO is default");
-	System.err.println("    -h, -help display this message");
-	System.err.println("    -j, -jar jarfile load jarfile"); 
-	System.err.println("    -o, -hod read hod server from system property ssh.gateway");
-	System.err.println("    -v, -verbose print all log messages to screen (default to print only INFO and above to screen)");
-	System.err.println("    -x, -exectype local|mapreduce, mapreduce is default");
+    System.out.println("USAGE: Pig [options] [-] : Run interactively in grunt shell.");
+    System.out.println("       Pig [options] -e[xecute] cmd [cmd ...] : Run cmd(s).");
+    System.out.println("       Pig [options] [-f[ile]] file : Run cmds found in file.");
+    System.out.println("  options include:");
+    System.out.println("    -4, -log4jconf log4j configuration file, overrides log conf");
+    System.out.println("    -b, -brief brief logging (no timestamps)");
+    System.out.println("    -c, -cluster clustername, kryptonite is default");
+    System.out.println("    -d, -debug debug level, INFO is default");
+    System.out.println("    -h, -help display this message");
+    System.out.println("    -j, -jar jarfile load jarfile"); 
+    System.out.println("    -o, -hod read hod server from system property ssh.gateway");
+    System.out.println("    -v, -verbose print all log messages to screen (default to print only INFO and above to screen)");
+    System.out.println("    -x, -exectype local|mapreduce, mapreduce is default");
 }
-        
-        
-    /**
-     * Prints a stack trace of an exception followed by any exceptions that are the causes of that
-     * exception.
-     */
-    private static void recursivePrintStackTrace(Throwable e) {
-        while (e != null) {
-            e.printStackTrace();
-            e = e.getCause();
-        }
-    }
-
-
 }