Posted to commits@pig.apache.org by da...@apache.org on 2011/11/02 19:32:30 UTC

svn commit: r1196747 [1/2] - in /pig/trunk: ./ ivy/ shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/ shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/shims/ shims/src/hadoop23/org/apache/pig/backend/hadoo...

Author: daijy
Date: Wed Nov  2 18:32:28 2011
New Revision: 1196747

URL: http://svn.apache.org/viewvc?rev=1196747&view=rev
Log:
PIG-2125: Make Pig work with hadoop .NEXT (PIG-2125-10.patch)

Added:
    pig/trunk/src/META-INF/
    pig/trunk/src/META-INF/services/
    pig/trunk/src/META-INF/services/org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider
Modified:
    pig/trunk/build.xml
    pig/trunk/ivy.xml
    pig/trunk/ivy/ivysettings.xml
    pig/trunk/ivy/libraries.properties
    pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
    pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
    pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java
    pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapBase.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapReduce.java
    pig/trunk/src/org/apache/pig/impl/io/PigFile.java
    pig/trunk/src/org/apache/pig/tools/pigstats/PigStatusReporter.java
    pig/trunk/test/findbugsExcludeFile.xml
    pig/trunk/test/org/apache/pig/test/TestAlgebraicEvalLocal.java
    pig/trunk/test/org/apache/pig/test/TestBZip.java
    pig/trunk/test/org/apache/pig/test/TestCombiner.java
    pig/trunk/test/org/apache/pig/test/TestCommit.java
    pig/trunk/test/org/apache/pig/test/TestNewPlanOperatorPlan.java
    pig/trunk/test/org/apache/pig/test/TestPigRunner.java
    pig/trunk/test/org/apache/pig/test/TestProject.java
    pig/trunk/test/org/apache/pig/test/TestPruneColumn.java
    pig/trunk/test/org/apache/pig/test/TestScalarAliases.java
    pig/trunk/test/org/apache/pig/test/TestScriptUDF.java
    pig/trunk/test/org/apache/pig/test/TestSecondarySort.java
    pig/trunk/test/org/apache/pig/test/TestStreaming.java
    pig/trunk/test/org/apache/pig/test/TestStreamingLocal.java
    pig/trunk/test/org/apache/pig/test/Util.java

Modified: pig/trunk/build.xml
URL: http://svn.apache.org/viewvc/pig/trunk/build.xml?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/build.xml (original)
+++ pig/trunk/build.xml Wed Nov  2 18:32:28 2011
@@ -20,8 +20,6 @@
 	xmlns:ivy="antlib:org.apache.ivy.ant">
     <!-- Load all the default properties, and any the user wants    -->
     <!-- to contribute (without having to type -D or edit this file -->
-    <taskdef resource="net/sf/antcontrib/antcontrib.properties"/>
-
     <property file="${user.home}/build.properties" />
     <property file="${basedir}/build.properties" />
 
@@ -68,7 +66,9 @@
 
     <!-- jar names. TODO we might want to use the svn reversion name in the name in case it is a dev version -->
     <property name="output.jarfile" value="${build.dir}/${final.name}.jar" />
+    <property name="output.stage.jarfile" value="${build.dir}/${final.name}.stage.jar" />
     <property name="output.jarfile.withouthadoop" value="${build.dir}/${final.name}-withouthadoop.jar" />
+    <property name="output.stage.jarfile.withouthadoop" value="${build.dir}/${final.name}-withouthadoop.stage.jar" />
     <property name="output.jarfile.core" value="${build.dir}/${final.name}-core.jar" />
     <property name="output.jarfile.sources" value="${build.dir}/${final.name}-sources.jar" />
     <property name="output.jarfile.javadoc" value="${build.dir}/${final.name}-javadoc.jar" />
@@ -158,6 +158,10 @@
            select="\1" /-->
     <property name="hadoopversion" value="20" />
 
+    <condition property="isHadoop23">
+        <equals arg1="hadoopversion" arg2="23"/>
+    </condition>
+
     <property name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
     <property name="src.shims.test.dir" value="${basedir}/shims/test/hadoop${hadoopversion}" />
 
@@ -171,7 +175,6 @@
 	<property name="ant_task_repo_url"
 	value="${mvnrepo}/org/apache/maven/maven-ant-tasks/${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
 	<property name="ivy_repo_url" value="${mvnrepo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
-    <property name="guava.jar" value="guava-${guava.version}.jar"/>
     <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
     <property name="ivy.org" value="org.apache.pig"/>
     <property name="build.dir" location="build" />
@@ -225,7 +228,6 @@
     <!-- javadoc-classpath -->
     <path id="javadoc-classpath">
 	<fileset file="${lib.dir}/${automaton.jarfile}" />
-        <fileset file="${lib.dir}/${guava.jarfile}" />
 	<path refid="javadoc.classpath"/>	
     </path> 
 
@@ -514,9 +516,34 @@
         </jar>
     </target>
 
+    <!-- ================================================================== -->
+    <!-- Utility to build pig.jar and pig-withouthadoop.jar                 -->
+    <!-- ================================================================== -->
     <target name="jar-all" depends="jar,jar-withouthadoop" description="Create pig-all.jar and pig-withouthadoop.jar">
     </target>
 
+    <target name="include-meta" if="isHadoop23">
+        <copy todir="${build.classes}/META-INF">
+            <fileset dir="${src.dir}/META-INF" includes="**"/>  
+        </copy>
+        <move file="${output.jarfile}" tofile="${output.stage.jarfile}"/>
+        <sleep seconds="1"/>
+        <jar jarfile="${output.jarfile}" if="isHadoop23">
+            <manifest>
+                <attribute name="Main-Class" value="org.apache.pig.Main" />
+                <section name="org/apache/pig">
+                    <attribute name="Implementation-Vendor" value="Apache" />
+                    <attribute name="Implementation-Title" value="Pig" />
+                    <attribute name="Implementation-Version" value="${version}" />
+                    <attribute name="Build-TimeStamp" value="${timestamp}" />
+                    <attribute name="Svn-Revision" value="${svnString}" />
+                </section>
+            </manifest>
+            <zipfileset src="${output.stage.jarfile}"/>
+            <fileset dir="${build.classes}" includes="META-INF/**" />
+        </jar>
+    </target>
+
     <!-- ================================================================== -->
     <!-- Make pig.jar                                                       -->
     <!-- ================================================================== -->
@@ -566,24 +593,29 @@
                     <attribute name="Svn-Revision" value="${svnString}" />
                 </section>
             </manifest>
-            <zipfileset src="${ivy.lib.dir}/hadoop-core-${hadoop-core.version}.jar" />
             <zipfileset src="${lib.dir}/${automaton.jarfile}" />
-            <zipfileset src="${ivy.lib.dir}/antlr-runtime-${antlr.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/ST4-${stringtemplate.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/junit-${junit.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/jsch-${jsch.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/jline-${jline.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/jackson-mapper-asl-${jackson.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/jackson-core-asl-${jackson.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/joda-time-${joda-time.version}.jar" /> 
-            <zipfileset src="${ivy.lib.dir}/${guava.jar}" />
-            <zipgroupfileset dir="${ivy.lib.dir}" includes="hbase-${hbase.version}.jar" /> 
-            <zipgroupfileset dir="${ivy.lib.dir}" includes="commons*.jar"/>
-            <zipgroupfileset dir="${ivy.lib.dir}" includes="log4j*.jar"/>
-            <zipgroupfileset dir="${ivy.lib.dir}" includes="jsp-api*.jar"/>
-            <zipgroupfileset dir="${ivy.lib.dir}" includes="zookeeper*.jar"/>
+            <zipgroupfileset dir="${ivy.lib.dir}">
+                <include name="hadoop-core-${hadoop-core.version}.jar" />
+                <include name="hadoop-*-${hadoop-common.version}*.jar" />
+                <include name="antlr-runtime-${antlr.version}.jar" />
+                <include name="ST4-${stringtemplate.version}.jar" />
+                <include name="junit-${junit.version}.jar" />
+                <include name="jsch-${jsch.version}.jar" />
+                <include name="jline-${jline.version}.jar" />
+                <include name="jackson-mapper-asl-${jackson.version}.jar" />
+                <include name="jackson-core-asl-${jackson.version}.jar" />
+                <include name="joda-time-${joda-time.version}.jar" />
+                <include name="guava-${guava.version}.jar" />
+                <include name="avro-${avro.version}.jar"/>
+                <include name="hbase-${hbase.version}.jar" />
+                <include name="commons*.jar"/>
+                <include name="log4j*.jar"/>
+                <include name="jsp-api*.jar"/>
+                <include name="zookeeper*.jar"/>
+            </zipgroupfileset>
             <fileset file="${basedir}/conf/pig-default.properties" />
         </jar>
+        <antcall target="include-meta" inheritRefs="true" inheritall="true"/>
         <copy file="${output.jarfile}" tofile="${output.jarfile.backcompat}"/>
     </target>
 
@@ -653,12 +685,14 @@
                 </section>
             </manifest>
             <zipfileset src="${lib.dir}/${automaton.jarfile}" />
-            <zipfileset src="${ivy.lib.dir}/antlr-runtime-${antlr.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/ST4-${stringtemplate.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/jline-${jline.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/jackson-mapper-asl-${jackson.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/jackson-core-asl-${jackson.version}.jar" />
-            <zipfileset src="${ivy.lib.dir}/${guava.jar}" />
+            <zipgroupfileset dir="${ivy.lib.dir}">
+                <include name="antlr-runtime-${antlr.version}.jar" />
+                <include name="ST4-${stringtemplate.version}.jar" />
+                <include name="jline-${jline.version}.jar" />
+                <include name="jackson-mapper-asl-${jackson.version}.jar" />
+                <include name="jackson-core-asl-${jackson.version}.jar" />
+                <include name="guava-${guava.version}.jar" />
+            </zipgroupfileset>
             <fileset file="${basedir}/conf/pig-default.properties" />
         </jar>
         <copy file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.backcompat.withouthadoop}"/>

Modified: pig/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/pig/trunk/ivy.xml?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/ivy.xml (original)
+++ pig/trunk/ivy.xml Wed Nov  2 18:32:28 2011
@@ -31,13 +31,15 @@
     <conf name="default" extends="master,runtime"/>
     <conf name="runtime" extends="compile,test" description="runtime but not the artifact" />
     <!--Private configurations. -->
-    <conf name="compile" visibility="private" description="compile artifacts"/>
+    <conf name="compile" extends="hadoop${hadoopversion}" visibility="private" description="compile artifacts"/>
     <conf name="test" extends="compile" visibility="private"/>
     <conf name="javadoc" visibility="private" extends="compile,test"/>
     <conf name="releaseaudit" visibility="private"/>
     <conf name="jdiff" visibility="private"/>
     <conf name="checkstyle" visibility="private"/>
     <conf name="buildJar" extends="compile,test" visibility="private"/>
+    <conf name="hadoop20" visibility="private"/>
+    <conf name="hadoop23" visibility="private"/>
   </configurations>
   <publications>
     <!--get the artifact from our module name-->
@@ -50,14 +52,56 @@
       conf="checkstyle->master"/> -->
     <dependency org="commons-beanutils" name="commons-beanutils-core" rev="${commons-beanutils.version}"
       conf="checkstyle->master"/>
+    <dependency org="commons-codec" name="commons-codec" rev="${commons-codec.version}"
+      conf="hadoop23->master"/>
     <dependency org="commons-el" name="commons-el" rev="${commons-el.version}"
       conf="compile->master"/>
+    <dependency org="commons-httpclient" name="commons-httpclient" rev="${commons-httpclient.version}"
+      conf="hadoop23->master"/>
+    <dependency org="commons-configuration" name="commons-configuration" rev="${commons-configuration.version}"
+      conf="hadoop23->master"/>
     <dependency org="log4j" name="log4j" rev="${log4j.version}"
       conf="compile->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop-core.version}"
-      conf="compile->default;test->default"/>
+      conf="hadoop20->default"/>
     <dependency org="org.apache.hadoop" name="hadoop-test" rev="${hadoop-test.version}"
-      conf="compile->default;test->default"/>
+      conf="hadoop20->default"/>
+    <dependency org="org.apache.hadoop" name="hadoop-annotations" 
+      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-auth" 
+      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-common" 
+      rev="${hadoop-common.version}" conf="hadoop23->master">
+      <artifact name="hadoop-common" ext="jar" />
+      <artifact name="hadoop-common" type="tests" ext="jar" m:classifier="tests" />
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-hdfs"
+      rev="${hadoop-hdfs.version}" conf="hadoop23->master">
+      <artifact name="hadoop-hdfs" ext="jar" />
+      <artifact name="hadoop-hdfs" type="tests" ext="jar" m:classifier="tests" />
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-core" rev="${hadoop-mapreduce.version}"
+      conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${hadoop-mapreduce.version}"
+      conf="hadoop23->master">
+        <artifact name="hadoop-mapreduce-client-jobclient" type="jar" m:classifier="tests"/>
+        <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+        <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests" rev="${hadoop-mapreduce.version}"
+      conf="hadoop23->master">
+      <artifact name="hadoop-yarn-server-tests" type="jar" m:classifier="tests"/>
+    </dependency>
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-common" 
+      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-api" 
+      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-common" 
+      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-nodemanager" 
+      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-resourcemanager" 
+      rev="${hadoop-common.version}" conf="hadoop23->master"/>
     <dependency org="org.mortbay.jetty" name="jetty" rev="${jetty.version}"
       conf="compile->master"/>
     <dependency org="org.mortbay.jetty" name="jetty-util" rev="${jetty-util.version}"
@@ -67,7 +111,7 @@
      <dependency org="org.slf4j" name="slf4j-log4j12" rev="${slf4j-log4j12.version}"
       conf="compile->master;test->master"/>
     <dependency org="commons-cli" name="commons-cli" rev="${commons-cli.version}"
-      conf="checkstyle->master"/>
+      conf="compile->master;checkstyle->master"/>
     
     <dependency org="org.apache.avro" name="avro" rev="${avro.version}"
       conf="compile->default;checkstyle->master"/>
@@ -93,14 +137,14 @@
     <dependency org="org.codehaus.jackson" name="jackson-core-asl" rev="${jackson.version}"
       conf="compile->master"/>
     <dependency org="joda-time" name="joda-time" rev="${joda-time.version}" conf="compile->master"/>
-  	<dependency org="commons-lang" name="commons-lang" rev="${commons-lang.version}"
+    <dependency org="commons-lang" name="commons-lang" rev="${commons-lang.version}"
   	  conf="compile->master"/>
     <dependency org="com.google.guava" name="guava" rev="${guava.version}" conf="compile->master" />
-  	<dependency org="org.python" name="jython" rev="${jython.version}" conf="compile->master"/>
+    <dependency org="org.python" name="jython" rev="${jython.version}" conf="compile->master"/>
     <dependency org="rhino" name="js" rev="${rhino.version}" conf="compile->master"/>
-  	<dependency org="org.antlr" name="antlr" rev="${antlr.version}" conf="compile->master"/>
-  	<dependency org="org.antlr" name="antlr-runtime" rev="${antlr.version}" conf="compile->default"/>
-	<dependency org="org.antlr" name="ST4" rev="${stringtemplate.version}" conf="compile->default"/>
+    <dependency org="org.antlr" name="antlr" rev="${antlr.version}" conf="compile->master"/>
+    <dependency org="org.antlr" name="antlr-runtime" rev="${antlr.version}" conf="compile->default"/>
+    <dependency org="org.antlr" name="ST4" rev="${stringtemplate.version}" conf="compile->default"/>
     <dependency org="org.apache.zookeeper" name="zookeeper" rev="${zookeeper.version}" conf="compile->master"/>
     <dependency org="org.jboss.netty" name="netty" rev="3.2.2.Final" conf="compile->master"/>
 

Modified: pig/trunk/ivy/ivysettings.xml
URL: http://svn.apache.org/viewvc/pig/trunk/ivy/ivysettings.xml?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/ivy/ivysettings.xml (original)
+++ pig/trunk/ivy/ivysettings.xml Wed Nov  2 18:32:28 2011
@@ -31,22 +31,32 @@
   -->
   <property name="repo.maven.org" value="${mvnrepo}" override="true"/>
   <property name="repo.jboss.org" value="http://repository.jboss.com/nexus/content/groups/public/" override="false"/>
-
+  <property name="repo.apache.snapshots" value="http://repository.apache.org/content/groups/snapshots-group/" override="false"/>
+  <property name="repo.dir" value="${user.home}/.m2/repository"/>
   <property name="maven2.pattern"  value="[organisation]/[module]/[revision]/[module]-[revision](-[classifier])"/> 
   <property name="maven2.pattern.ext" value="${maven2.pattern}.[ext]"/>
+  <property name="snapshot.pattern"  value="[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]"/>
+  <property name="resolvers" value="default" override="false"/>
+  <property name="force-resolve" value="false" override="false"/>
   <!-- pull in the local repository -->
   <include url="${ivy.default.conf.dir}/ivyconf-local.xml"/>
-  <settings defaultResolver="default"/>
+  <settings defaultResolver="${resolvers}"/>
   <resolvers>
     <ibiblio name="maven2" root="${repo.maven.org}" pattern="${maven2.pattern.ext}" m2compatible="true"/>
     <ibiblio name="jboss-maven2" root="${repo.jboss.org}" pattern="${maven2.pattern.ext}" m2compatible="true"/>
+    <ibiblio name="apache-snapshots" root="${repo.apache.snapshots}" pattern="${snapshot.pattern}" checkmodified="true" m2compatible="true"/>
+    <filesystem name="fs" m2compatible="true" checkconsistency="false" force="${force-resolve}">
+       <artifact pattern="${repo.dir}/${maven2.pattern.ext}"/>
+       <ivy pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision].pom"/>
+    </filesystem>
     <chain name="default" dual="true">
       <resolver ref="local"/>
       <resolver ref="maven2"/>
       <resolver ref="jboss-maven2"/>
+      <resolver ref="apache-snapshots"/>
     </chain>
     <chain name="internal">
-      <resolver ref="local"/>
+      <resolver ref="fs"/>
     </chain>
     <chain name="external">
       <resolver ref="maven2"/>

Modified: pig/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/pig/trunk/ivy/libraries.properties?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/ivy/libraries.properties (original)
+++ pig/trunk/ivy/libraries.properties Wed Nov  2 18:32:28 2011
@@ -18,14 +18,20 @@ apacheant.version=1.7.1
 avro.version=1.5.3
 commons-beanutils.version=1.7.0
 commons-cli.version=1.0
+commons-codec.version=1.4
 commons-el.version=1.0
 commons-logging.version=1.1.1
 commons-lang.version=2.4
+commons-configuration.version=1.6
+commons-httpclient.version=3.1
 checkstyle.version=4.2
 ivy.version=2.2.0
 guava.version=r06
 hadoop-core.version=0.20.2
 hadoop-test.version=0.20.2
+hadoop-common.version=0.23.0-SNAPSHOT
+hadoop-hdfs.version=0.23.0-SNAPSHOT
+hadoop-mapreduce.version=0.23.0-SNAPSHOT
 hbase.version=0.90.0
 hsqldb.version=1.8.0.10
 jackson.version=1.7.3
@@ -43,11 +49,11 @@ jython.version=2.5.0
 rhino.version=1.7R2
 antlr.version=3.4
 stringtemplate.version=4.0.4
-log4j.version=1.2.14
+log4j.version=1.2.16
 netty.version=3.2.2
 rats-lib.version=0.5.1
-slf4j-api.version=1.4.3
-slf4j-log4j12.version=1.4.3
+slf4j-api.version=1.6.1
+slf4j-log4j12.version=1.6.1
 xerces.version=1.4.4
 wagon-http.version=1.0-beta-2
 zookeeper.version=3.3.3

Modified: pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java (original)
+++ pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java Wed Nov  2 18:32:28 2011
@@ -29,6 +29,7 @@ import org.apache.hadoop.mapred.Counters
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.StatusReporter;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapBase;
 import org.apache.pig.data.DataBag;
 import org.apache.pig.data.Tuple;
@@ -55,7 +56,10 @@ abstract public class PigMapBase extends
         return new IllustratorContext(conf, input, output, split);
     }
     
-    
+    @Override
+    public boolean inIllustrator(Context context) {
+        return (context instanceof PigMapBase.IllustratorContext);
+    }
 
     /**
      * Dummy implementation of StatusReporter for illustrate mode

Modified: pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java (original)
+++ pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java Wed Nov  2 18:32:28 2011
@@ -160,5 +160,17 @@ public class PigMapReduce extends PigGen
             public void progress() { 
             }
         }
+
+        @Override
+        public boolean inIllustrator(
+                org.apache.hadoop.mapreduce.Reducer.Context context) {
+            return (context instanceof PigMapReduce.Reduce.IllustratorContext);
+        }
+
+        @Override
+        public POPackage getPack(
+                org.apache.hadoop.mapreduce.Reducer.Context context) {
+            return ((PigMapReduce.Reduce.IllustratorContext) context).pack;
+        }
     }
 }

Modified: pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java (original)
+++ pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java Wed Nov  2 18:32:28 2011
@@ -58,4 +58,8 @@ public class HadoopShims {
     static public boolean isMap(TaskAttemptID taskAttemptID) {
         return taskAttemptID.isMap();
     }
+    
+    static public TaskAttemptID getNewTaskAttemptID() {
+        return new TaskAttemptID();
+    }
 }

Modified: pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java (original)
+++ pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java Wed Nov  2 18:32:28 2011
@@ -19,6 +19,7 @@ package org.apache.pig.backend.hadoop.ex
 
 
 import java.io.IOException;
+
 import java.net.URI;
 import java.util.Iterator;
 import java.util.List;
@@ -37,14 +38,21 @@ import org.apache.hadoop.mapreduce.Mappe
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.Partitioner;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.StatusReporter;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.Reducer.Context;
+import org.apache.hadoop.mapreduce.lib.map.WrappedMapper;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
 import org.apache.hadoop.security.Credentials;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigGenericMapBase;
 import org.apache.pig.data.DataBag;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.io.PigNullableWritable;
 import org.apache.pig.impl.util.Pair;
+import org.apache.hadoop.mapreduce.lib.map.WrappedMapper;
 
 abstract public class PigMapBase extends PigGenericMapBase {
     /**
@@ -63,10 +71,11 @@ abstract public class PigMapBase extends
     public Context getIllustratorContext(Configuration conf, DataBag input,
           List<Pair<PigNullableWritable, Writable>> output, InputSplit split)
           throws IOException, InterruptedException {
-        return new IllustratorContext(conf, input, output, split);
+    	org.apache.hadoop.mapreduce.Mapper.Context mapperContext = new WrappedMapper<Text, Tuple, PigNullableWritable, Writable>().getMapContext(new IllustratorContext(conf, input, output, split));
+        return mapperContext;
     }
     
-    public class IllustratorContext extends Context {
+    public class IllustratorContext extends MapContextImpl<Text, Tuple, PigNullableWritable, Writable> {
         private DataBag input;
         List<Pair<PigNullableWritable, Writable>> output;
         private Iterator<Tuple> it = null;
@@ -76,11 +85,12 @@ abstract public class PigMapBase extends
         public IllustratorContext(Configuration conf, DataBag input,
               List<Pair<PigNullableWritable, Writable>> output,
               InputSplit split) throws IOException, InterruptedException {
-              if (output == null)
-                  throw new IOException("Null output can not be used");
-              this.input = input; this.output = output;
+            super(conf, new TaskAttemptID(), null, null, null, null, split);
+            if (output == null)
+                throw new IOException("Null output can not be used");
+            this.input = input; this.output = output;
         }
-        
+
         @Override
         public boolean nextKeyValue() throws IOException, InterruptedException {
             if (input == null) {
@@ -118,263 +128,10 @@ abstract public class PigMapBase extends
         public void progress() {
           
         }
-
-        @Override
-        public InputSplit getInputSplit() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Counter getCounter(Enum<?> arg0) {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Counter getCounter(String arg0, String arg1) {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public OutputCommitter getOutputCommitter() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public String getStatus() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public TaskAttemptID getTaskAttemptID() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public void setStatus(String arg0) {
-            // TODO Auto-generated method stub
-            
-        }
-
-        @Override
-        public Path[] getArchiveClassPaths() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public long[] getArchiveTimestamps() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public URI[] getCacheArchives() throws IOException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public URI[] getCacheFiles() throws IOException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<? extends Reducer<?, ?, ?, ?>> getCombinerClass()
-                throws ClassNotFoundException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Configuration getConfiguration() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Credentials getCredentials() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Path[] getFileClassPaths() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public long[] getFileTimestamps() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public RawComparator<?> getGroupingComparator() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<? extends InputFormat<?, ?>> getInputFormatClass()
-                throws ClassNotFoundException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public String getJar() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public JobID getJobID() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public String getJobName() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public boolean getJobSetupCleanupNeeded() {
-            // TODO Auto-generated method stub
-            return false;
-        }
-
-        @Override
-        public Path[] getLocalCacheArchives() throws IOException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Path[] getLocalCacheFiles() throws IOException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<?> getMapOutputKeyClass() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<?> getMapOutputValueClass() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<? extends Mapper<?, ?, ?, ?>> getMapperClass()
-                throws ClassNotFoundException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public int getMaxMapAttempts() {
-            // TODO Auto-generated method stub
-            return 0;
-        }
-
-        @Override
-        public int getMaxReduceAttempts() {
-            // TODO Auto-generated method stub
-            return 0;
-        }
-
-        @Override
-        public int getNumReduceTasks() {
-            // TODO Auto-generated method stub
-            return 0;
-        }
-
-        @Override
-        public Class<? extends OutputFormat<?, ?>> getOutputFormatClass()
-                throws ClassNotFoundException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<?> getOutputKeyClass() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<?> getOutputValueClass() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<? extends Partitioner<?, ?>> getPartitionerClass()
-                throws ClassNotFoundException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public boolean getProfileEnabled() {
-            // TODO Auto-generated method stub
-            return false;
-        }
-
-        @Override
-        public String getProfileParams() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public IntegerRanges getProfileTaskRange(boolean arg0) {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass()
-                throws ClassNotFoundException {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public RawComparator<?> getSortComparator() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public boolean getSymlink() {
-            // TODO Auto-generated method stub
-            return false;
-        }
-
-        @Override
-        public String getUser() {
-            // TODO Auto-generated method stub
-            return null;
-        }
-
-        @Override
-        public Path getWorkingDirectory() throws IOException {
-            // TODO Auto-generated method stub
-            return null;
-        }
+    }
+    
+    @Override
+    public boolean inIllustrator(Context context) {
+        return (context instanceof WrappedMapper.Context);
     }
 }
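
For orientation (editorial sketch, not part of this patch): on Hadoop 0.23 Mapper.Context is a thin abstract wrapper over the MapContext interface, so instead of stubbing out every method (the bulk of the deletions above), the shim builds a MapContextImpl and adapts it through WrappedMapper. Using the same classes the diff imports, and given a Configuration conf and InputSplit split from the caller, the pattern boils down to:

    // Sketch only; reader, writer, committer and reporter are left null because
    // illustrate mode feeds records from memory rather than a RecordReader.
    MapContext<Text, Tuple, PigNullableWritable, Writable> mapContext =
        new MapContextImpl<Text, Tuple, PigNullableWritable, Writable>(
            conf, new TaskAttemptID(), null, null, null, null, split);
    Mapper<Text, Tuple, PigNullableWritable, Writable>.Context context =
        new WrappedMapper<Text, Tuple, PigNullableWritable, Writable>()
            .getMapContext(mapContext);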

Modified: pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java (original)
+++ pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java Wed Nov  2 18:32:28 2011
@@ -27,10 +27,12 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.jobcontrol.Job;
 import org.apache.hadoop.mapreduce.Counter;
@@ -40,17 +42,41 @@ import org.apache.hadoop.mapreduce.Mappe
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.Partitioner;
+import org.apache.hadoop.mapreduce.ReduceContext;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.Reducer.Context;
+import org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer;
+import org.apache.hadoop.mapreduce.task.ReduceContextImpl;
 import org.apache.hadoop.security.Credentials;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapBase.IllustratorContext;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
+import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.io.NullableTuple;
 import org.apache.pig.impl.io.PigNullableWritable;
 import org.apache.pig.impl.util.Pair;
 import org.apache.pig.pen.FakeRawKeyValueIterator;
 
 public class PigMapReduce extends PigGenericMapReduce {
+    
+    static class IllustrateReducerContext extends WrappedReducer<PigNullableWritable, NullableTuple, PigNullableWritable, Writable> {
+        public IllustratorContext 
+        getReducerContext(ReduceContext<PigNullableWritable, NullableTuple, PigNullableWritable, Writable> reduceContext) {
+            return new IllustratorContext(reduceContext);
+        }
+        
+        public class IllustratorContext 
+            extends WrappedReducer.Context {
+            public IllustratorContext(
+                    ReduceContext<PigNullableWritable, NullableTuple, PigNullableWritable, Writable> reduceContext) {
+                super(reduceContext);
+            }
+            public POPackage getPack() {
+                return ((Reduce.IllustratorContextImpl)reduceContext).pack;
+            }
+        }
+    }
+		  
     public static class Reduce extends PigGenericMapReduce.Reduce {
         /**
          * Get reducer's illustrator context
@@ -64,11 +90,13 @@ public class PigMapReduce extends PigGen
         @Override
         public Context getIllustratorContext(Job job,
                List<Pair<PigNullableWritable, Writable>> input, POPackage pkg) throws IOException, InterruptedException {
-            return new IllustratorContext(job, input, pkg);
+        	org.apache.hadoop.mapreduce.Reducer.Context reducerContext = new IllustrateReducerContext()
+        			.getReducerContext(new IllustratorContextImpl(job, input, pkg));
+        	return reducerContext;
         }
         
         @SuppressWarnings("unchecked")
-        public class IllustratorContext extends Context {
+        public class IllustratorContextImpl extends ReduceContextImpl<PigNullableWritable, NullableTuple, PigNullableWritable, Writable> {
             private PigNullableWritable currentKey = null, nextKey = null;
             private NullableTuple nextValue = null;
             private List<NullableTuple> currentValues = null;
@@ -76,12 +104,15 @@ public class PigMapReduce extends PigGen
             private final ByteArrayOutputStream bos;
             private final DataOutputStream dos;
             private final RawComparator sortComparator, groupingComparator;
-            POPackage pack = null;
+            public POPackage pack = null;
+            private IllustratorValueIterable iterable = new IllustratorValueIterable();
 
-            public IllustratorContext(Job job,
+            public IllustratorContextImpl(Job job,
                   List<Pair<PigNullableWritable, Writable>> input,
                   POPackage pkg
                   ) throws IOException, InterruptedException {
+                super(job.getJobConf(), new TaskAttemptID(), new FakeRawKeyValueIterator(input.iterator().hasNext()),
+                    null, null, null, null, null, null, PigNullableWritable.class, NullableTuple.class);
                 bos = new ByteArrayOutputStream();
                 dos = new DataOutputStream(bos);
                 org.apache.hadoop.mapreduce.Job nwJob = new org.apache.hadoop.mapreduce.Job(job.getJobConf());
@@ -116,6 +147,60 @@ public class PigMapReduce extends PigGen
                 pack = pkg;
             }
             
+            public class IllustratorValueIterator implements ReduceContext.ValueIterator<NullableTuple> {
+                
+                private int pos = -1;
+                private int mark = -1;
+
+                @Override
+                public void mark() throws IOException {
+                    mark=pos-1;
+                    if (mark<-1)
+                        mark=-1;
+                }
+
+                @Override
+                public void reset() throws IOException {
+                    pos=mark;
+                }
+
+                @Override
+                public void clearMark() throws IOException {
+                    mark=-1;
+                }
+
+                @Override
+                public boolean hasNext() {
+                    return pos<currentValues.size()-1;
+                }
+
+                @Override
+                public NullableTuple next() {
+                    pos++;
+                    return currentValues.get(pos);
+                }
+
+                @Override
+                public void remove() {
+                    throw new UnsupportedOperationException("remove not implemented");
+                }
+
+                @Override
+                public void resetBackupStore() throws IOException {
+                    pos=-1;
+                    mark=-1;
+                }
+                
+            }
+            
+            protected class IllustratorValueIterable implements Iterable<NullableTuple> {
+                private IllustratorValueIterator iterator = new IllustratorValueIterator();
+                @Override
+                public Iterator<NullableTuple> iterator() {
+                    return iterator;
+                } 
+            }
+            
             @Override
             public PigNullableWritable getCurrentKey() {
                 return currentKey;
@@ -160,7 +245,7 @@ public class PigMapReduce extends PigGen
             
             @Override
             public Iterable<NullableTuple> getValues() {
-                return currentValues;
+                return iterable;
             }
             
             @Override
@@ -170,272 +255,16 @@ public class PigMapReduce extends PigGen
             @Override
             public void progress() { 
             }
+        }
 
-            @Override
-            public Counter getCounter(Enum<?> arg0) {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Counter getCounter(String arg0, String arg1) {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public NullableTuple getCurrentValue() throws IOException,
-                    InterruptedException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public OutputCommitter getOutputCommitter() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public boolean nextKeyValue() throws IOException,
-                    InterruptedException {
-                // TODO Auto-generated method stub
-                return false;
-            }
-
-            @Override
-            public String getStatus() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public TaskAttemptID getTaskAttemptID() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public void setStatus(String arg0) {
-                // TODO Auto-generated method stub
-                
-            }
-
-            @Override
-            public Path[] getArchiveClassPaths() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public long[] getArchiveTimestamps() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public URI[] getCacheArchives() throws IOException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public URI[] getCacheFiles() throws IOException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<? extends Reducer<?, ?, ?, ?>> getCombinerClass()
-                    throws ClassNotFoundException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Configuration getConfiguration() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Credentials getCredentials() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Path[] getFileClassPaths() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public long[] getFileTimestamps() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public RawComparator<?> getGroupingComparator() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<? extends InputFormat<?, ?>> getInputFormatClass()
-                    throws ClassNotFoundException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public String getJar() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public JobID getJobID() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public String getJobName() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public boolean getJobSetupCleanupNeeded() {
-                // TODO Auto-generated method stub
-                return false;
-            }
-
-            @Override
-            public Path[] getLocalCacheArchives() throws IOException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Path[] getLocalCacheFiles() throws IOException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<?> getMapOutputKeyClass() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<?> getMapOutputValueClass() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<? extends Mapper<?, ?, ?, ?>> getMapperClass()
-                    throws ClassNotFoundException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public int getMaxMapAttempts() {
-                // TODO Auto-generated method stub
-                return 0;
-            }
-
-            @Override
-            public int getMaxReduceAttempts() {
-                // TODO Auto-generated method stub
-                return 0;
-            }
-
-            @Override
-            public int getNumReduceTasks() {
-                // TODO Auto-generated method stub
-                return 0;
-            }
-
-            @Override
-            public Class<? extends OutputFormat<?, ?>> getOutputFormatClass()
-                    throws ClassNotFoundException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<?> getOutputKeyClass() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<?> getOutputValueClass() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<? extends Partitioner<?, ?>> getPartitionerClass()
-                    throws ClassNotFoundException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public boolean getProfileEnabled() {
-                // TODO Auto-generated method stub
-                return false;
-            }
-
-            @Override
-            public String getProfileParams() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public IntegerRanges getProfileTaskRange(boolean arg0) {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass()
-                    throws ClassNotFoundException {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public RawComparator<?> getSortComparator() {
-                // TODO Auto-generated method stub
-                return null;
-            }
-
-            @Override
-            public boolean getSymlink() {
-                // TODO Auto-generated method stub
-                return false;
-            }
-
-            @Override
-            public String getUser() {
-                // TODO Auto-generated method stub
-                return null;
-            }
+        @Override
+        public boolean inIllustrator(org.apache.hadoop.mapreduce.Reducer.Context context) {
+            return (context instanceof PigMapReduce.IllustrateReducerContext.IllustratorContext);
+        }
 
-            @Override
-            public Path getWorkingDirectory() throws IOException {
-                // TODO Auto-generated method stub
-                return null;
-            }
+        @Override
+        public POPackage getPack(org.apache.hadoop.mapreduce.Reducer.Context context) {
+            return ((PigMapReduce.IllustrateReducerContext.IllustratorContext) context).getPack();
         }
     }
 }
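
The reducer side follows the same shape (editorial note, not from this patch): ReduceContextImpl supplies the real context, the IllustrateReducerContext above (a WrappedReducer) adapts it to Reducer.Context and adds a getPack() accessor, and the new inIllustrator/getPack overrides let shared code avoid version-specific casts. A hypothetical call site in version-neutral reduce code might look like:

    // inIllustrator() and getPack() are the shim methods added in this diff;
    // the surrounding reduce logic is assumed, not shown in the patch.
    if (inIllustrator(context)) {
        POPackage pkg = getPack(context);
        // illustrate-mode path: drive the plan with pkg rather than the job's POPackage
    }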

Modified: pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java (original)
+++ pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java Wed Nov  2 18:32:28 2011
@@ -56,4 +56,10 @@ public class HadoopShims {
         
         return false;
     }
+    
+    static public TaskAttemptID getNewTaskAttemptID() {
+        TaskAttemptID taskAttemptID = new TaskAttemptID("", 1, TaskType.MAP, 
+                1, 1);
+        return taskAttemptID;
+    }
 }
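
Both shim variants now expose the same static entry point, so version-neutral code never has to choose between the no-argument TaskAttemptID constructor (hadoop20 shim) and the fully specified TaskType.MAP form used here. A hypothetical caller (sketch, not part of the patch):

    // The build compiles exactly one shims/src/hadoop* tree, selected by the
    // hadoopversion property, so this resolves to the matching implementation.
    TaskAttemptID dummyId = HadoopShims.getNewTaskAttemptID();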

Modified: pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java (original)
+++ pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java Wed Nov  2 18:32:28 2011
@@ -92,15 +92,18 @@ public class MiniCluster extends MiniGen
 				e.printStackTrace();
 			}
 			*/
-			m_dfs.getFileSystem().copyFromLocalFile(new Path("file:///hadoop-mapreduce-client-app-1.0-SNAPSHOT.jar"), new Path("/hadoop-mapreduce-client-app-1.0-SNAPSHOT.jar"));
-			m_dfs.getFileSystem().copyFromLocalFile(new Path("file:///hadoop-mapreduce-client-jobclient-1.0-SNAPSHOT.jar"), new Path("/hadoop-mapreduce-client-jobclient-1.0-SNAPSHOT.jar"));
+			m_dfs.getFileSystem().copyFromLocalFile(new Path("file:///hadoop-mapreduce-client-app-0.23.0-SNAPSHOT.jar"), new Path("/hadoop-mapreduce-client-app-0.23.0-SNAPSHOT.jar"));
+			m_dfs.getFileSystem().copyFromLocalFile(new Path("file:///hadoop-mapreduce-client-jobclient-0.23.0-SNAPSHOT.jar"), new Path("/hadoop-mapreduce-client-jobclient-0.23.0-SNAPSHOT.jar"));
 			m_dfs.getFileSystem().copyFromLocalFile(new Path("file:///pig.jar"), new Path("/pig.jar"));
 			m_dfs.getFileSystem().copyFromLocalFile(new Path("file:///pig-test.jar"), new Path("/pig-test.jar"));
 
-            DistributedCache.addFileToClassPath(new Path("/hadoop-mapreduce-client-app-1.0-SNAPSHOT.jar"), m_conf);
+            DistributedCache.addFileToClassPath(new Path("/hadoop-mapreduce-client-app-0.23.0-SNAPSHOT.jar"), m_conf);
             DistributedCache.addFileToClassPath(new Path("/pig.jar"), m_conf);
             DistributedCache.addFileToClassPath(new Path("/pig-test.jar"), m_conf);
-            DistributedCache.addFileToClassPath(new Path("/hadoop-mapreduce-client-jobclient-1.0-SNAPSHOT.jar"), m_conf);
+            DistributedCache.addFileToClassPath(new Path("/hadoop-mapreduce-client-jobclient-0.23.0-SNAPSHOT.jar"), m_conf);
+            String cachefile = m_conf.get("mapreduce.job.cache.files");
+            m_conf.set("alternative.mapreduce.job.cache.files", cachefile);
+            m_conf.unset("mapreduce.job.cache.files");
 
             //ConfigurationUtil.mergeConf(m_conf, m_dfs_conf);
             //ConfigurationUtil.mergeConf(m_conf, m_mr_conf);

Added: pig/trunk/src/META-INF/services/org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider
URL: http://svn.apache.org/viewvc/pig/trunk/src/META-INF/services/org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider?rev=1196747&view=auto
==============================================================================
--- pig/trunk/src/META-INF/services/org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider (added)
+++ pig/trunk/src/META-INF/services/org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider Wed Nov  2 18:32:28 2011
@@ -0,0 +1,17 @@
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+org.apache.hadoop.mapred.LocalClientProtocolProvider
+org.apache.hadoop.mapred.YarnClientProtocolProvider
+org.apache.hadoop.mapred.LocalClientProtocolProvider
+org.apache.hadoop.mapred.YarnClientProtocolProvider

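Note: this is a standard java.util.ServiceLoader provider-configuration file. Hadoop 0.23's job client discovers ClientProtocolProvider implementations through it and chooses between local and YARN submission at runtime; shipping the file under src/META-INF presumably keeps the providers discoverable once the Hadoop classes are repackaged into pig.jar. The duplicated entries are harmless, since ServiceLoader ignores repeated provider names. A rough illustration of the lookup (sketch only):

    import java.util.ServiceLoader;
    import org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider;

    // Iterate every provider registered under META-INF/services; Hadoop's Cluster
    // class picks the first one whose create() returns a non-null ClientProtocol
    // for the job configuration.
    public class ListProviders {
        public static void main(String[] args) {
            for (ClientProtocolProvider provider
                    : ServiceLoader.load(ClientProtocolProvider.class)) {
                System.out.println("candidate provider: " + provider.getClass().getName());
            }
        }
    }
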
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java Wed Nov  2 18:32:28 2011
@@ -18,6 +18,8 @@
 
 package org.apache.pig.backend.hadoop.datastorage;
 
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.util.Enumeration;
 import java.util.Iterator;
 import java.util.Map;
@@ -73,6 +75,23 @@ public class ConfigurationUtil {
             localConf.addResource("core-default.xml");
         } else {
             localConf = new Configuration(true);
+            // This is a hack to get the unit tests working under Hadoop 23.
+            // The Hadoop 23 MiniMRCluster currently needs the distributed cache set up before it
+            // starts, so pigtest/conf/hadoop-site.xml contains such an entry. That entry prevents some
+            // tests from succeeding (they expect those files in HDFS), so we unset it under Hadoop 23.
+            // This should go away once MiniMRCluster fixes the distributed cache issue.
+            Method unsetMethod = null;
+            try {
+                unsetMethod = localConf.getClass().getMethod("unset", new Class[]{String.class});
+            } catch (Exception e) {
+            }
+            if (unsetMethod!=null) {
+                try {
+                    unsetMethod.invoke(localConf, new Object[]{"mapreduce.job.cache.files"});
+                } catch (Exception e) {
+                    // Should not happen
+                }
+            }
         }
         Properties props = ConfigurationUtil.toProperties(localConf);
         props.setProperty(MapRedUtil.FILE_SYSTEM_NAME, "file:///");

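Note: the reflection in this hunk exists because Configuration.unset() is only present on newer Hadoop versions; calling it directly would not compile against 0.20. The same "call it only if it exists" pattern in isolation (helper name is illustrative, not part of Pig):

    import java.lang.reflect.Method;
    import org.apache.hadoop.conf.Configuration;

    // Compatibility helper: unset a key only when the running Hadoop version
    // provides Configuration.unset(String).
    public final class ConfCompat {
        static void unsetIfSupported(Configuration conf, String key) {
            try {
                Method unset = conf.getClass().getMethod("unset", String.class);
                unset.invoke(conf, key);
            } catch (NoSuchMethodException e) {
                // Hadoop 0.20: no unset(), nothing to do
            } catch (Exception e) {
                // unexpected reflection failure; ignore, matching the code above
            }
        }
    }
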
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java Wed Nov  2 18:32:28 2011
@@ -175,6 +175,7 @@ public class HExecutionEngine {
             jc.addResource("mapred-default.xml");
             recomputeProperties(jc, properties);
             
+            properties.setProperty("mapreduce.framework.name", "local");
             properties.setProperty(JOB_TRACKER_LOCATION, LOCAL );
             properties.setProperty(FILE_SYSTEM_LOCATION, "file:///");
             properties.setProperty(ALTERNATIVE_FILE_SYSTEM_LOCATION, "file:///");

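Note: setting mapreduce.framework.name is what makes the 0.23 job client pick the LocalClientProtocolProvider registered above, while the older job-tracker property keeps 0.20 local mode working. A hedged sketch of the resulting local-mode properties (assuming the Pig constants JOB_TRACKER_LOCATION / FILE_SYSTEM_LOCATION map to mapred.job.tracker and fs.default.name):

    import java.util.Properties;

    // Key names are Hadoop's; values as set in the hunk above.
    Properties props = new Properties();
    props.setProperty("mapreduce.framework.name", "local");
    props.setProperty("mapred.job.tracker", "local");
    props.setProperty("fs.default.name", "file:///");
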
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java Wed Nov  2 18:32:28 2011
@@ -703,7 +703,17 @@ public class JobControlCompiler{
             }
             if (maxCombinedSplitSize > 0)
                 conf.setLong("pig.maxCombinedSplitSize", maxCombinedSplitSize);
-                        
+            
+            // This is a hack to set the distributed cache files for Hadoop 23. Once MiniMRCluster
+            // no longer requires local jars at a fixed location, this can be removed.
+            if (pigContext.getExecType() == ExecType.MAPREDUCE) {
+                String newfiles = conf.get("alternative.mapreduce.job.cache.files");
+                if (newfiles!=null) {
+                    String files = conf.get("mapreduce.job.cache.files");
+                    conf.set("mapreduce.job.cache.files",
+                        files == null ? newfiles.toString() : files + "," + newfiles);
+                }
+            }
             // Serialize the UDF specific context info.
             UDFContext.getUDFContext().serialize(conf);
             Job cjob = new Job(new JobConf(nwJob.getConfiguration()), new ArrayList());

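Note: this is the restore half of the MiniCluster stash above. A worked example of the merge, with illustrative paths only:

    import org.apache.hadoop.conf.Configuration;

    public class CacheMergeExample {
        public static void main(String[] args) {
            // Simulate a job that already has one cache file plus the stashed jars.
            Configuration conf = new Configuration(false);
            conf.set("mapreduce.job.cache.files", "hdfs:///user/x/udf.jar");
            conf.set("alternative.mapreduce.job.cache.files", "/pig.jar,/pig-test.jar");

            String newfiles = conf.get("alternative.mapreduce.job.cache.files");
            if (newfiles != null) {
                String files = conf.get("mapreduce.job.cache.files");
                conf.set("mapreduce.job.cache.files",
                         files == null ? newfiles : files + "," + newfiles);
            }
            // prints: hdfs:///user/x/udf.jar,/pig.jar,/pig-test.jar
            System.out.println(conf.get("mapreduce.job.cache.files"));
        }
    }
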
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapBase.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapBase.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapBase.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapBase.java Wed Nov  2 18:32:28 2011
@@ -164,7 +164,7 @@ public abstract class PigGenericMapBase 
         PigMapReduce.sJobContext = context;
         PigMapReduce.sJobConfInternal.set(context.getConfiguration());
         PigMapReduce.sJobConf = context.getConfiguration();
-        inIllustrator = (context instanceof PigMapBase.IllustratorContext);
+        inIllustrator = inIllustrator(context);
         
         PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(job.get("udf.import.list")));
         pigContext = (PigContext)ObjectSerializer.deserialize(job.get("pig.pigContext"));
@@ -301,6 +301,8 @@ public abstract class PigGenericMapBase 
 
     abstract public void collect(Context oc, Tuple tuple) throws InterruptedException, IOException;
 
+    abstract public boolean inIllustrator(Context context);
+    
     /**
      * @return the keyType
      */

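Note: the instanceof check against a version-specific IllustratorContext is replaced by an abstract hook so each shim's concrete mapper decides for itself. Presumably the hadoop20 implementation is just the old check moved down, roughly:

    // Presumed hadoop20 implementation of the new hook (sketch only; the committed
    // version lives in shims/src/hadoop20/.../PigMapBase.java).
    @Override
    public boolean inIllustrator(Context context) {
        return context instanceof PigMapBase.IllustratorContext;
    }
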
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapReduce.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapReduce.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapReduce.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapReduce.java Wed Nov  2 18:32:28 2011
@@ -35,6 +35,7 @@ import org.apache.hadoop.mapreduce.JobCo
 import org.apache.hadoop.mapred.jobcontrol.Job;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.Mapper.Context;
 
 import org.apache.pig.PigException;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -307,9 +308,9 @@ public class PigGenericMapReduce {
         @Override
         protected void setup(Context context) throws IOException, InterruptedException {
             super.setup(context);
-            inIllustrator = (context instanceof PigMapReduce.Reduce.IllustratorContext);
+            inIllustrator = inIllustrator(context);
             if (inIllustrator)
-                pack = ((PigMapReduce.Reduce.IllustratorContext) context).pack;
+                pack = getPack(context);
             Configuration jConf = context.getConfiguration();
             SpillableMemoryManager.configure(ConfigurationUtil.toProperties(jConf));
             sJobContext = context;
@@ -550,6 +551,10 @@ public class PigGenericMapReduce {
          */
         abstract public Context getIllustratorContext(Job job,
                List<Pair<PigNullableWritable, Writable>> input, POPackage pkg) throws IOException, InterruptedException;
+        
+        abstract public boolean inIllustrator(Context context);
+        
+        abstract public POPackage getPack(Context context);
     }
     
     /**

Modified: pig/trunk/src/org/apache/pig/impl/io/PigFile.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/impl/io/PigFile.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/impl/io/PigFile.java (original)
+++ pig/trunk/src/org/apache/pig/impl/io/PigFile.java Wed Nov  2 18:32:28 2011
@@ -80,7 +80,8 @@ public class PigFile {
         PigOutputFormat.setLocation(jc, store);
         OutputCommitter oc;
         // create a simulated TaskAttemptContext
-        TaskAttemptContext tac = HadoopShims.createTaskAttemptContext(conf, new TaskAttemptID());
+        
+        TaskAttemptContext tac = HadoopShims.createTaskAttemptContext(conf, HadoopShims.getNewTaskAttemptID());
         PigOutputFormat.setLocation(tac, store);
         RecordWriter<?,?> rw ;
         try {

Modified: pig/trunk/src/org/apache/pig/tools/pigstats/PigStatusReporter.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/tools/pigstats/PigStatusReporter.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/tools/pigstats/PigStatusReporter.java (original)
+++ pig/trunk/src/org/apache/pig/tools/pigstats/PigStatusReporter.java Wed Nov  2 18:32:28 2011
@@ -74,4 +74,7 @@ public class PigStatusReporter extends S
         }
     }
 
+    public float getProgress() {
+        return 0;
+    }
 }

Modified: pig/trunk/test/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/pig/trunk/test/findbugsExcludeFile.xml?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/findbugsExcludeFile.xml (original)
+++ pig/trunk/test/findbugsExcludeFile.xml Wed Nov  2 18:32:28 2011
@@ -441,4 +441,8 @@
         <Class name = "org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigOutputCommitter" />
         <Bug pattern = "REC_CATCH_EXCEPTION" />
     </Match>
+    <Match>
+        <Class name = "org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapBase$IllustratorContext" />
+        <Bug pattern = "SIC_INNER_SHOULD_BE_STATIC" />
+    </Match>
 </FindBugsFilter>

Modified: pig/trunk/test/org/apache/pig/test/TestAlgebraicEvalLocal.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestAlgebraicEvalLocal.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestAlgebraicEvalLocal.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestAlgebraicEvalLocal.java Wed Nov  2 18:32:28 2011
@@ -21,10 +21,12 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.PrintStream;
 import java.util.Iterator;
+import java.util.Properties;
 import java.util.Random;
 
 import junit.framework.TestCase;
 
+import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.builtin.PigStorage;
 import org.apache.pig.data.DataType;
@@ -42,7 +44,7 @@ public class TestAlgebraicEvalLocal exte
     @Before
     @Override
     protected void setUp() throws Exception {
-        pig = new PigServer("local");
+        pig = new PigServer(ExecType.LOCAL, new Properties());
     }
     
     Boolean[] nullFlags = new Boolean[]{ false, true};

Modified: pig/trunk/test/org/apache/pig/test/TestBZip.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestBZip.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestBZip.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestBZip.java Wed Nov  2 18:32:28 2011
@@ -590,7 +590,9 @@ public class TestBZip {
         Util.createInputFile(cluster, inputFileName, inputData);
 
         String inputScript = "set mapred.output.compress true\n" +
+                "set mapreduce.output.fileoutputformat.compress true\n" +
                 "set mapred.output.compression.codec org.apache.hadoop.io.compress.BZip2Codec\n" +
+                "set mapreduce.output.fileoutputformat.compress.codec org.apache.hadoop.io.compress.BZip2Codec\n" +
                 "a = load '" + inputFileName + "';\n" +
                 "store a into 'output3.bz2';\n" +
                 "store a into 'output3';";

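Note: many mapred.* job properties were renamed in Hadoop 0.23 (here mapred.output.compress becomes mapreduce.output.fileoutputformat.compress, with a matching codec key), so the script sets both spellings to behave the same on either version. The equivalent settings on a plain Configuration would be roughly:

    import org.apache.hadoop.conf.Configuration;

    // Setting both the pre- and post-rename names keeps one configuration valid on
    // 0.20 and 0.23; newer Hadoops also translate deprecated keys, but setting both
    // explicitly avoids depending on that mapping.
    Configuration conf = new Configuration();
    conf.setBoolean("mapred.output.compress", true);
    conf.setBoolean("mapreduce.output.fileoutputformat.compress", true);
    conf.set("mapred.output.compression.codec",
             "org.apache.hadoop.io.compress.BZip2Codec");
    conf.set("mapreduce.output.fileoutputformat.compress.codec",
             "org.apache.hadoop.io.compress.BZip2Codec");
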
Modified: pig/trunk/test/org/apache/pig/test/TestCombiner.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestCombiner.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestCombiner.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestCombiner.java Wed Nov  2 18:32:28 2011
@@ -37,12 +37,17 @@ import junit.framework.Assert;
 import org.apache.pig.EvalFunc;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
+import org.apache.pig.ResourceSchema.ResourceFieldSchema;
 import org.apache.pig.builtin.PigStorage;
 import org.apache.pig.data.DataBag;
+import org.apache.pig.data.DataType;
 import org.apache.pig.data.DefaultDataBag;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.impl.io.FileLocalizer;
+import org.apache.pig.impl.util.Utils;
+import org.apache.pig.newplan.logical.relational.LogicalSchema;
+import org.apache.pig.newplan.logical.relational.LogicalSchema.LogicalFieldSchema;
 import org.junit.AfterClass;
 import org.junit.Test;
 
@@ -495,11 +500,7 @@ public class TestCombiner {
             assertFalse(baos.toString().matches("(?si).*combine plan.*"));
     
             Iterator<Tuple> it = pigServer.openIterator("c");
-            int count = 0;
-            while (it.hasNext()) {
-                Tuple t = it.next();
-                assertEquals(expected[count++], t.toString());
-            }
+            Util.checkQueryOutputsAfterSortRecursive(it, expected, "group:chararray,age:long,b:{t:(name:chararray,age:int,gpa:double)}");
         } catch (IOException e) {
             e.printStackTrace();
             Assert.fail();

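Note: this and several test changes below replace position-by-position assertions with Util.checkQueryOutputsAfterSortRecursive, since result ordering is not guaranteed once the jobs run on 0.23. The real helper is in Util.java; an order-insensitive check amounts to roughly the following (illustrative only, it does not sort inside nested bags the way the real helper does):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Iterator;
    import java.util.List;
    import org.apache.pig.data.Tuple;
    import static org.junit.Assert.assertEquals;

    // Collect the actual rows as strings, sort both sides, then compare.
    static void checkUnordered(Iterator<Tuple> it, String[] expected) {
        List<String> actual = new ArrayList<String>();
        while (it.hasNext()) {
            actual.add(it.next().toString());
        }
        List<String> exp = new ArrayList<String>(Arrays.asList(expected));
        Collections.sort(actual);
        Collections.sort(exp);
        assertEquals(exp, actual);
    }
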
Modified: pig/trunk/test/org/apache/pig/test/TestCommit.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestCommit.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestCommit.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestCommit.java Wed Nov  2 18:32:28 2011
@@ -144,22 +144,21 @@ public class TestCommit extends TestCase
         pigServer.registerQuery("y = foreach x generate age, cnt, max, min;");
         Iterator<Tuple> iter = pigServer.openIterator("y");
         int count = 0;
+        boolean contain1=false, contain2=false;
         while(iter.hasNext()){
             Tuple t = iter.next();
             count++;
-            if (count == 1) {
-                assertTrue(t.get(0).equals(expected1.get(0)));
-                assertTrue(t.get(1).equals(expected1.get(1)));
-                assertTrue(t.get(2).equals(expected1.get(2)));
-                assertTrue(t.get(3).equals(expected1.get(3)));
-            } else if (count == 2){
-                assertTrue(t.get(0).equals(expected2.get(0)));
-                assertTrue(t.get(1).equals(expected2.get(1)));
-                assertTrue(t.get(2).equals(expected2.get(2)));
-                assertTrue(t.get(3).equals(expected2.get(3)));
+            if (t.get(0).equals(expected1.get(0)) && t.get(1).equals(expected1.get(1)) && t.get(2).equals(expected1.get(2)) && t.get(3).equals(expected1.get(3))) {
+                contain1 = true;
             }
+            
+            if (t.get(0).equals(expected2.get(0)) && t.get(1).equals(expected2.get(1)) && t.get(2).equals(expected2.get(2)) && t.get(3).equals(expected2.get(3))) {
+                contain2 = true;
+            }
+            
         }
         assertEquals(count, 2);
+        assertTrue(contain1 && contain2);
         Util.deleteFile(cluster, "testCheckin2-input.txt");
     }    
 }

Modified: pig/trunk/test/org/apache/pig/test/TestNewPlanOperatorPlan.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestNewPlanOperatorPlan.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestNewPlanOperatorPlan.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestNewPlanOperatorPlan.java Wed Nov  2 18:32:28 2011
@@ -1521,7 +1521,7 @@ public class TestNewPlanOperatorPlan ext
         aschema1.addField(new LogicalSchema.LogicalFieldSchema(
             "x", null, DataType.INTEGER));
         LOLoad A1 = new LOLoad(new FileSpec("/abc",
-            new FuncSpec(DummyLoad.class.getName(), new String[] {"x", "y"})), aschema1, lp1, null);
+            new FuncSpec(DummyLoad.class.getName(), new String[] {"x", "y"})), aschema1, lp1, new Configuration());
         lp1.add(A1);
         
         LogicalExpressionPlan fp1 = new LogicalExpressionPlan();
@@ -1539,7 +1539,7 @@ public class TestNewPlanOperatorPlan ext
         
         LogicalPlan lp2 = new LogicalPlan();
         LOLoad A2 = new LOLoad(new FileSpec("/abc",
-            new FuncSpec(DummyLoad.class.getName(), new String[] {"x", "z"})), null, lp2, null);
+            new FuncSpec(DummyLoad.class.getName(), new String[] {"x", "z"})), null, lp2, new Configuration());
         lp2.add(A2);
         
         LogicalExpressionPlan fp2 = new LogicalExpressionPlan();

Modified: pig/trunk/test/org/apache/pig/test/TestPigRunner.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestPigRunner.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestPigRunner.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestPigRunner.java Wed Nov  2 18:32:28 2011
@@ -562,7 +562,8 @@ public class TestPigRunner {
         Assert.assertNotNull(ctx);
 
         assertTrue(ctx.extraJars.contains(ClassLoader.getSystemResource("pig-withouthadoop.jar")));
-        assertEquals("default", ctx.getProperties().getProperty("mapred.job.queue.name"));
+        assertTrue("default", ctx.getProperties().getProperty("mapred.job.queue.name")!=null && ctx.getProperties().getProperty("mapred.job.queue.name").equals("default")||
+                ctx.getProperties().getProperty("mapreduce.job.queuename")!=null && ctx.getProperties().getProperty("mapreduce.job.queuename").equals("default"));
        
     }
 

Modified: pig/trunk/test/org/apache/pig/test/TestProject.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestProject.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestProject.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestProject.java Wed Nov  2 18:32:28 2011
@@ -339,7 +339,7 @@ public class TestProject extends  junit.
         String query = "a = load '" + inputFileName + "';" +
                 "b = group a all;" +
                 "c = foreach b generate flatten(a.($1, $2)),a.$2;";
-        
+            
         PigServer ps = new PigServer(ExecType.LOCAL);
         Util.registerMultiLineQuery(ps, query);
         Iterator<Tuple> it = ps.openIterator("c");
@@ -347,10 +347,16 @@ public class TestProject extends  junit.
                 (Tuple) Util.getPigConstant("('world', null, {(null),(null)})"),
                 (Tuple) Util.getPigConstant("('bye', null, {(null),(null)})")
         };
-        int i = 0;
+        boolean contains0 = false;
+        boolean contains1 = false;
         while(it.hasNext()) {
-            assertEquals(expectedResults[i++].toString(), it.next().toString());
+            String actualResult = it.next().toString();
+            if (actualResult.equals(expectedResults[0].toString()))
+                contains0 = true;
+            if (actualResult.equals(expectedResults[1].toString()))
+                contains1 = true;
         }
+        assertTrue(contains0&&contains1);
     }
 
     @Test

Modified: pig/trunk/test/org/apache/pig/test/TestPruneColumn.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestPruneColumn.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestPruneColumn.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestPruneColumn.java Wed Nov  2 18:32:28 2011
@@ -573,13 +573,11 @@ public class TestPruneColumn extends Tes
         pigServer.registerQuery("C = foreach B generate $1;");
         Iterator<Tuple> iter = pigServer.openIterator("C");
         
+        String[] expected = new String[] {
+                "({(1,2,3),(2,5,2)})"
+        };
         assertTrue(iter.hasNext());
-        Tuple t = iter.next();
-        
-        assertTrue(t.size()==1);
-        assertTrue(t.get(0).toString().equals("{(1,2,3),(2,5,2)}"));
-        
-        assertFalse(iter.hasNext());
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected, org.apache.pig.newplan.logical.Util.translateSchema(pigServer.dumpSchema("C")));   
         
         assertTrue(emptyLogFileMessage());
     }
@@ -1215,13 +1213,11 @@ public class TestPruneColumn extends Tes
         
         Iterator<Tuple> iter = pigServer.openIterator("C");
         
-        assertTrue(iter.hasNext());
-        Tuple t = iter.next();
-        assertTrue(t.size()==2);
-        assertTrue(t.get(0).toString().equals("all"));
-        assertTrue(t.get(1).toString().equals("{([key2#2,key1#1],1),([key2#4,key1#2],2)}"));
+        String[] expected = new String[] {
+                "(all,{([key2#2,key1#1],1),([key2#4,key1#2],2)})"
+        };
         
-        assertFalse(iter.hasNext());
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected, org.apache.pig.newplan.logical.Util.translateSchema(pigServer.dumpSchema("C")));
         
         assertTrue(checkLogFileMessage(new String[]{"Columns pruned for A: $0"}));
     }
@@ -1920,21 +1916,14 @@ public class TestPruneColumn extends Tes
         pigServer.registerQuery("D = filter C by A::a1>=B::a1;");
         Iterator<Tuple> iter = pigServer.openIterator("D");
 
-        assertTrue(iter.hasNext());
-        Tuple t = iter.next();
-        assertTrue(t.toString().equals("(1,2,3,1,2)"));
+        String [] expected = new String[] {
+                "(1,2,3,1,2)",
+                "(1,3,2,1,2)",
+                "(1,3,2,1,3)",
+                "(2,5,2,2,5)"
+        };
         
-        assertTrue(iter.hasNext());
-        t = iter.next();
-        assertTrue(t.toString().equals("(1,3,2,1,2)"));
-        
-        assertTrue(iter.hasNext());
-        t = iter.next();
-        assertTrue(t.toString().equals("(1,3,2,1,3)"));
-
-        assertTrue(iter.hasNext());
-        t = iter.next();
-        assertTrue(t.toString().equals("(2,5,2,2,5)"));
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected, org.apache.pig.newplan.logical.Util.translateSchema(pigServer.dumpSchema("D")));
         
         assertTrue(emptyLogFileMessage());
     }
@@ -2080,20 +2069,10 @@ public class TestPruneColumn extends Tes
         pigServer.registerQuery("F = foreach E generate a0;");
         Iterator<Tuple> iter = pigServer.openIterator("F");
         
-        assertTrue(iter.hasNext());
-        Tuple t = iter.next();
-        
-        assertTrue(t.size()==1);
-        assertTrue(t.toString().equals("(1)"));
+        String[] expected = new String[] {"(1)", "(2)"};
         
-        assertTrue(iter.hasNext());
-        t = iter.next();
-        
-        assertTrue(t.size()==1);
-        assertTrue(t.toString().equals("(2)"));
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected, org.apache.pig.newplan.logical.Util.translateSchema(pigServer.dumpSchema("F")));
 
-        assertFalse(iter.hasNext());
-        
         assertTrue(checkLogFileMessage(new String[]{"Columns pruned for A: $1"}));
     }
 

Modified: pig/trunk/test/org/apache/pig/test/TestScalarAliases.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestScalarAliases.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestScalarAliases.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestScalarAliases.java Wed Nov  2 18:32:28 2011
@@ -410,16 +410,13 @@ public class TestScalarAliases  {
 
         Iterator<Tuple> iter = pigServer.openIterator("Y");
 
-        Tuple t = iter.next();
-        assertTrue(t.toString().equals("(1,5,Total3,three)"));
-
-        t = iter.next();
-        assertTrue(t.toString().equals("(2,10,Total3,three)"));
-
-        t = iter.next();
-        assertTrue(t.toString().equals("(3,20,Total3,three)"));
-
-        assertFalse(iter.hasNext());
+        String[] expected = new String[] {
+                "(1,5,Total3,three)",
+                "(2,10,Total3,three)",
+                "(3,20,Total3,three)"
+        };
+        
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected, org.apache.pig.newplan.logical.Util.translateSchema(pigServer.dumpSchema("Y")));
     }
 
     // See PIG-1434

Modified: pig/trunk/test/org/apache/pig/test/TestScriptUDF.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestScriptUDF.java?rev=1196747&r1=1196746&r2=1196747&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestScriptUDF.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestScriptUDF.java Wed Nov  2 18:32:28 2011
@@ -270,7 +270,7 @@ public class TestScriptUDF{
     public void testPythonScriptUDFBagInput() throws Exception{
         String[] script = {
                 "#!/usr/bin/python",
-                "@outputSchema(\"bag:{(y:{t:(word:chararray)})}\")",
+                "@outputSchema(\"bag:{(y:{t:(len:int,word:chararray)})}\")",
                 "def collect(bag):" ,
                 "\toutBag = []",
                 "\tfor word in bag:",
@@ -296,42 +296,13 @@ public class TestScriptUDF{
         pigServer.registerQuery("C = foreach B generate pig.collect(A);");
 
         Iterator<Tuple> iter = pigServer.openIterator("C");
-        Assert.assertTrue(iter.hasNext());
-        Tuple t = iter.next();
         
-        DataBag bag; 
-        Tuple tup;
-        bag = BagFactory.getInstance().newDefaultBag();
-        tup = TupleFactory.getInstance().newTuple();
-        tup.append(3);
-        tup.append("hello");
-        bag.add(tup);
-        tup = TupleFactory.getInstance().newTuple();
-        tup.append(3);
-        tup.append("world");
-        bag.add(tup);
-        tup = TupleFactory.getInstance().newTuple();
-        tup.append(3);
-        tup.append("program");
-        bag.add(tup);
-    
-        Assert.assertTrue(t.get(0).toString().equals(bag.toString()));
-        
-        Assert.assertTrue(iter.hasNext());
-        t = iter.next();
-        
-        bag = BagFactory.getInstance().newDefaultBag();
-        tup = TupleFactory.getInstance().newTuple();
-        tup.append(2);
-        tup.append("pig");
-        bag.add(tup);
-        tup = TupleFactory.getInstance().newTuple();
-        tup.append(2);
-        tup.append("hadoop");
-        bag.add(tup);
-        
-        Assert.assertTrue(t.get(0).toString().equals(bag.toString()));
-    }
+        String[] expected = new String[] {
+        		"({(3,hello),(3,world),(3,program)})",
+        		"({(2,hadoop),(2,pig)})"
+        };
+        Util.checkQueryOutputsAfterSortRecursive(iter, expected, "y: {(len:int, word:chararray)}");
+        }
     
     @Test
     public void testPythonScriptUDFMapInput() throws Exception{