Posted to commits@pig.apache.org by ro...@apache.org on 2017/01/07 00:11:38 UTC

svn commit: r1777738 [1/2] - in /pig/trunk: ./ bin/ contrib/piggybank/java/ contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/ ivy/ shims/src/hadoop2/ shims/src/hadoop2/org/ shims/src/hadoop2/org/apache/ shims/src/hadoop2/org/apache...

Author: rohini
Date: Sat Jan  7 00:11:37 2017
New Revision: 1777738

URL: http://svn.apache.org/viewvc?rev=1777738&view=rev
Log:
PIG-4923: Drop Hadoop 1.x support in Pig 0.17 (szita via rohini)

Added:
    pig/trunk/ivy/ant-contrib-1.0b3.jar   (with props)
    pig/trunk/shims/src/hadoop2/
    pig/trunk/shims/src/hadoop2/org/
    pig/trunk/shims/src/hadoop2/org/apache/
    pig/trunk/shims/src/hadoop2/org/apache/pig/
    pig/trunk/shims/src/hadoop2/org/apache/pig/backend/
    pig/trunk/shims/src/hadoop2/org/apache/pig/backend/hadoop/
    pig/trunk/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/
    pig/trunk/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/
    pig/trunk/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
    pig/trunk/shims/test/hadoop2/
    pig/trunk/src/org/apache/pig/backend/hadoop/PigATSClient.java
      - copied unchanged from r1777730, pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/PigATSClient.java
    pig/trunk/src/org/apache/pig/backend/hadoop/PigJobControl.java
      - copied, changed from r1777730, pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop23/PigJobControl.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
      - copied unchanged from r1777730, pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
      - copied unchanged from r1777730, pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    pig/trunk/test/org/apache/pig/test/MiniCluster.java
      - copied unchanged from r1777730, pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java
    pig/trunk/test/org/apache/pig/test/TezMiniCluster.java
      - copied unchanged from r1777730, pig/trunk/shims/test/hadoop23/org/apache/pig/test/TezMiniCluster.java
Removed:
    pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/PigATSClient.java
    pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
    pig/trunk/shims/src/hadoop20/org/apache/pig/backend/hadoop20/PigJobControl.java
    pig/trunk/shims/src/hadoop23/org/apache/hadoop/mapred/DowngradeHelper.java
    pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/PigATSClient.java
    pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
    pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop23/PigJobControl.java
    pig/trunk/shims/test/hadoop20/org/apache/pig/test/MiniCluster.java
    pig/trunk/shims/test/hadoop20/org/apache/pig/test/TezMiniCluster.java
    pig/trunk/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java
    pig/trunk/shims/test/hadoop23/org/apache/pig/test/TezMiniCluster.java
    pig/trunk/test/excluded-tests-20
Modified:
    pig/trunk/.gitignore
    pig/trunk/BUILDING.md
    pig/trunk/CHANGES.txt
    pig/trunk/bin/pig
    pig/trunk/bin/pig.py
    pig/trunk/build.xml
    pig/trunk/contrib/piggybank/java/build.xml
    pig/trunk/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/IndexedStorage.java
    pig/trunk/ivy.xml
    pig/trunk/ivy/libraries.properties
    pig/trunk/src/docs/src/documentation/content/xdocs/start.xml
    pig/trunk/src/docs/src/documentation/content/xdocs/tabs.xml
    pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/AbstractAccumuloStorage.java
    pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/Utils.java
    pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/Launcher.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchLauncher.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchPOStoreImpl.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigOutputCommitter.java
    pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/tez/TezDagBuilder.java
    pig/trunk/src/org/apache/pig/backend/hadoop/hbase/HBaseStorage.java
    pig/trunk/src/org/apache/pig/builtin/HiveUDFBase.java
    pig/trunk/src/org/apache/pig/builtin/OrcStorage.java
    pig/trunk/src/org/apache/pig/builtin/PigStorage.java
    pig/trunk/src/org/apache/pig/builtin/TextLoader.java
    pig/trunk/src/org/apache/pig/impl/builtin/PoissonSampleLoader.java
    pig/trunk/src/org/apache/pig/impl/io/PigFile.java
    pig/trunk/src/org/apache/pig/impl/util/JarManager.java
    pig/trunk/src/org/apache/pig/impl/util/Utils.java
    pig/trunk/src/org/apache/pig/tools/pigstats/PigStatsUtil.java
    pig/trunk/src/org/apache/pig/tools/pigstats/mapreduce/MRJobStats.java
    pig/trunk/src/org/apache/pig/tools/pigstats/mapreduce/MRPigStatsUtil.java
    pig/trunk/test/e2e/pig/build.xml
    pig/trunk/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java
    pig/trunk/test/org/apache/pig/parser/TestQueryParserUtils.java
    pig/trunk/test/org/apache/pig/test/TestBZip.java
    pig/trunk/test/org/apache/pig/test/TestJobControlCompiler.java
    pig/trunk/test/org/apache/pig/test/TestLoaderStorerShipCacheFiles.java
    pig/trunk/test/org/apache/pig/test/TestMultiQueryCompiler.java
    pig/trunk/test/org/apache/pig/test/TestPigRunner.java
    pig/trunk/test/org/apache/pig/test/TestPigStatsMR.java
    pig/trunk/test/org/apache/pig/test/TestSkewedJoin.java
    pig/trunk/test/org/apache/pig/test/Util.java
    pig/trunk/test/perf/pigmix/bin/generate_data.sh
    pig/trunk/test/perf/pigmix/build.xml

Modified: pig/trunk/.gitignore
URL: http://svn.apache.org/viewvc/pig/trunk/.gitignore?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/.gitignore (original)
+++ pig/trunk/.gitignore Sat Jan  7 00:11:37 2017
@@ -23,3 +23,4 @@ contrib/piggybank/java/piggybank.jar
 conf/log4j.properties
 lib/jdiff/pig_*SNAPSHOT.xml
 test/resources/*.jar
+!ivy/ant-contrib-1.0b3.jar

Modified: pig/trunk/BUILDING.md
URL: http://svn.apache.org/viewvc/pig/trunk/BUILDING.md?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/BUILDING.md (original)
+++ pig/trunk/BUILDING.md Sat Jan  7 00:11:37 2017
@@ -13,18 +13,14 @@
 
 ## Building Pig
 
-To compile with Hadoop 1.x 
-
-    ant clean jar piggybank 
-
 To compile with Hadoop 2.x 
 
-    ant clean jar piggybank -Dhadoopversion=23
+    ant clean jar piggybank
 
 Building and running the tests needed before submitting a patch.
 For more details https://cwiki.apache.org/confluence/display/PIG/HowToContribute
     
-    ANT_OPTS='-Djavac.args="-Xlint -Xmaxwarns 1000" -Dhadoopversion=23'
+    ANT_OPTS='-Djavac.args="-Xlint -Xmaxwarns 1000"'
     ant ${ANT_OPTS} clean piggybank jar compile-test test-commit
     cd contrib/piggybank/java && ant ${ANT_OPTS} test
 

Modified: pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/trunk/CHANGES.txt?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/CHANGES.txt (original)
+++ pig/trunk/CHANGES.txt Sat Jan  7 00:11:37 2017
@@ -22,6 +22,8 @@ Trunk (unreleased changes)
  
 INCOMPATIBLE CHANGES
 
+PIG-4923: Drop Hadoop 1.x support in Pig 0.17 (szita via rohini)
+
 PIG-5067: Revisit union on numeric type and chararray to bytearray (knoguchi)
  
 IMPROVEMENTS

Modified: pig/trunk/bin/pig
URL: http://svn.apache.org/viewvc/pig/trunk/bin/pig?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/bin/pig (original)
+++ pig/trunk/bin/pig Sat Jan  7 00:11:37 2017
@@ -301,7 +301,8 @@ HADOOP_CORE_JAR=`echo ${HADOOP_HOME}/had
 if [ -z "$HADOOP_CORE_JAR" ]; then
     HADOOP_VERSION=2
 else
-    HADOOP_VERSION=1
+    echo "Pig requires Hadoop 2 to be present in HADOOP_HOME (currently: $HADOOP_HOME). Please install Hadoop 2.x"
+    exit 1
 fi
 
 # if using HBase, likely want to include HBase jars and config
@@ -377,11 +378,7 @@ if [ -n "$HADOOP_BIN" ]; then
     if [ -n "$PIG_JAR" ]; then
         CLASSPATH=${CLASSPATH}:$PIG_JAR
     else
-        if [ "$HADOOP_VERSION" == "1" ]; then
-            echo "Cannot locate pig-core-h${HADOOP_VERSION}.jar. do 'ant jar', and try again"
-        else
-            echo "Cannot locate pig-core-h${HADOOP_VERSION}.jar. do 'ant -Dhadoopversion=23 jar', and try again"
-        fi
+        echo "Cannot locate pig-core-h${HADOOP_VERSION}.jar. do 'ant jar', and try again"
         exit 1
     fi
 
@@ -402,8 +399,8 @@ if [ -n "$HADOOP_BIN" ]; then
         exec "$HADOOP_BIN" jar "$PIG_JAR" "${remaining[@]}"
     fi
 else
-    # use hadoop-core.jar to run local mode
-    PIG_JAR=`echo $PIG_HOME/pig*-core-h1.jar`
+    # use bundled hadoop to run local mode
+    PIG_JAR=`echo $PIG_HOME/pig*-core-h2.jar`
 
     if [ -n "$PIG_JAR" ]; then
         CLASSPATH="${CLASSPATH}:$PIG_JAR"
@@ -412,12 +409,12 @@ else
         exit 1
     fi
 
-    for f in $PIG_HOME/lib/h1/*.jar; do
+    for f in $PIG_HOME/lib/h2/*.jar; do
         CLASSPATH=${CLASSPATH}:$f;
     done
 
-    # Add bundled hadoop-core.jar
-    for f in $PIG_HOME/lib/hadoop1-runtime/*.jar; do
+    # Add bundled hadoop jars
+    for f in $PIG_HOME/lib/hadoop2-runtime/*.jar; do
         CLASSPATH=${CLASSPATH}:$f;
     done
 

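Where bin/pig previously fell back to HADOOP_VERSION=1, it now fails fast. A sketch of a session against a Hadoop 1.x tree (the install path below is hypothetical):

    # hadoop-core-*.jar is found under HADOOP_HOME, so Pig aborts with exit
    # code 1 instead of silently targeting Hadoop 1 as before.
    $ HADOOP_HOME=/opt/hadoop-1.0.4 ./bin/pig
    Pig requires Hadoop 2 to be present in HADOOP_HOME (currently: /opt/hadoop-1.0.4). Please install Hadoop 2.x
    $ echo $?
    1
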
Modified: pig/trunk/bin/pig.py
URL: http://svn.apache.org/viewvc/pig/trunk/bin/pig.py?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/bin/pig.py (original)
+++ pig/trunk/bin/pig.py Sat Jan  7 00:11:37 2017
@@ -338,7 +338,7 @@ hadoopCoreJars = glob.glob(os.path.join(
 if len(hadoopCoreJars) == 0:
   hadoopVersion = 2
 else:
-  hadoopVersion = 1
+  sys.exit("Cannot locate Hadoop 2 binaries, please install Hadoop 2.x and try again.")
 
 if hadoopBin != "":
   if debug == True:
@@ -361,10 +361,7 @@ if hadoopBin != "":
       if len(pigJars) == 1:
         pigJar = pigJars[0]
       else:
-        if hadoopVersion == 1:
-          sys.exit("Cannot locate pig-core-h1.jar do 'ant jar', and try again")
-        else:
-          sys.exit("Cannot locate pig-core-h2.jar do 'ant -Dhadoopversion=23 jar', and try again")
+        sys.exit("Cannot locate pig-core-h2.jar do 'ant jar', and try again")
 
   pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "h" + str(hadoopVersion), "*.jar"))
   for jar in pigLibJars:
@@ -393,13 +390,13 @@ if hadoopBin != "":
 else:
   # fall back to use fat pig.jar
   if debug == True:
-    print "Cannot find local hadoop installation, using bundled hadoop 1"
-    
-  if os.path.exists(os.path.join(os.environ['PIG_HOME'], "pig-core-h1.jar")):
-    pigJar = os.path.join(os.environ['PIG_HOME'], "pig-core-h1.jar")
+    print "Cannot find local hadoop installation, using bundled hadoop 2"
+
+  if os.path.exists(os.path.join(os.environ['PIG_HOME'], "pig-core-h2.jar")):
+    pigJar = os.path.join(os.environ['PIG_HOME'], "pig-core-h2.jar")
 
   else:
-    pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "pig-*-core-h1.jar"))
+    pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "pig-*-core-h2.jar"))
 
     if len(pigJars) == 1:
       pigJar = pigJars[0]
@@ -407,15 +404,15 @@ else:
     elif len(pigJars) > 1:
       print "Ambiguity with pig jars found the following jars"
       print pigJars
-      sys.exit("Please remove irrelavant jars from %s" % os.path.join(os.environ['PIG_HOME'], "pig-core-h1.jar"))
+      sys.exit("Please remove irrelavant jars from %s" % os.path.join(os.environ['PIG_HOME'], "pig-core-h2.jar"))
     else:
-      sys.exit("Cannot locate pig-core-h1.jar. do 'ant jar' and try again")
+      sys.exit("Cannot locate pig-core-h2.jar. do 'ant jar' and try again")
 
-  pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "h1", "*.jar"))
+  pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "h2", "*.jar"))
   for jar in pigLibJars:
     classpath += os.pathsep + jar
 
-  pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "hadoop1-runtime", "*.jar"))
+  pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "hadoop2-runtime", "*.jar"))
   for jar in pigLibJars:
     classpath += os.pathsep + jar
 
@@ -423,7 +420,7 @@ else:
   pigClass = "org.apache.pig.Main"
   if debug == True:
     print "dry runXXX:"
-    print "%s %s %s -classpath %s %s %s" % (java, javaHeapMax, pigOpts, classpath, pigClass, ' '.join(restArgs)) 
+    print "%s %s %s -classpath %s %s %s" % (java, javaHeapMax, pigOpts, classpath, pigClass, ' '.join(restArgs))
   else:
     cmdLine = java + ' ' + javaHeapMax + ' ' + pigOpts
     cmdLine += ' ' + '-classpath ' + classpath + ' ' + pigClass +  ' ' + ' '.join(restArgs)

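With no local Hadoop installation found, both launchers now fall back to the bundled Hadoop 2 runtime instead of hadoop-core. A sketch of a local-mode run, with a placeholder script name:

    # Classpath is assembled from pig-*-core-h2.jar, lib/h2/*.jar and
    # lib/hadoop2-runtime/*.jar under $PIG_HOME; myscript.pig is hypothetical.
    ./bin/pig -x local myscript.pig
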
Modified: pig/trunk/build.xml
URL: http://svn.apache.org/viewvc/pig/trunk/build.xml?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/build.xml (original)
+++ pig/trunk/build.xml Sat Jan  7 00:11:37 2017
@@ -20,6 +20,13 @@
          xmlns:ivy="antlib:org.apache.ivy.ant">
     <!-- Load all the default properties, and any the user wants    -->
     <!-- to contribute (without having to type -D or edit this file -->
+
+    <taskdef resource="net/sf/antcontrib/antcontrib.properties">
+        <classpath>
+            <pathelement location="${basedir}/ivy/ant-contrib-1.0b3.jar"/>
+        </classpath>
+    </taskdef>
+
     <property file="${user.home}/build.properties" />
     <property file="${basedir}/build.properties" />
 
@@ -35,7 +42,7 @@
     <property name="pig.version.suffix" value="-SNAPSHOT" />
     <property name="version" value="${pig.version}${pig.version.suffix}" />
     <property name="final.name" value="${name}-${version}" />
-    <property name="year" value="2007-2012" />
+    <property name="year" value="2007-2016" />
 
     <!-- source properties -->
     <property name="lib.dir" value="${basedir}/lib" />
@@ -69,7 +76,6 @@
 
     <!-- artifact jar file names -->
     <property name="artifact.pig.jar" value="${final.name}.jar"/>
-    <property name="artifact.pig-h1.jar" value="${final.name}-h1.jar"/>
     <property name="artifact.pig-h2.jar" value="${final.name}-h2.jar"/>
     <property name="artifact.pig-sources.jar" value="${final.name}-sources.jar"/>
     <property name="artifact.pig-javadoc.jar" value="${final.name}-javadoc.jar"/>
@@ -77,15 +83,12 @@
 
     <!-- jar names. TODO we might want to use the svn reversion name in the name in case it is a dev version -->
     <property name="output.jarfile.withouthadoop" value="${build.dir}/${final.name}-withouthadoop.jar" />
-    <property name="output.jarfile.withouthadoop-h1" value="${legacy.dir}/${final.name}-withouthadoop-h1.jar" />
     <property name="output.jarfile.withouthadoop-h2" value="${legacy.dir}/${final.name}-withouthadoop-h2.jar" />
     <property name="output.jarfile.core" value="${build.dir}/${artifact.pig.jar}" />
-    <property name="output.jarfile.core-h1" value="${build.dir}/${artifact.pig-h1.jar}" />
     <property name="output.jarfile.core-h2" value="${build.dir}/${artifact.pig-h2.jar}" />
     <property name="output.jarfile.sources" value="${build.dir}/${artifact.pig-sources.jar}" />
     <property name="output.jarfile.javadoc" value="${build.dir}/${artifact.pig-javadoc.jar}" />
     <!-- Maintain old pig.jar in top level directory. -->
-    <property name="output.jarfile.backcompat-core-h1" value="${basedir}/${final.name}-core-h1.jar" />
     <property name="output.jarfile.backcompat-core-h2" value="${basedir}/${final.name}-core-h2.jar" />
 
     <!-- test properties -->
@@ -104,8 +107,6 @@
     <property name="test.smoke.file" value="${test.src.dir}/smoke-tests"/>
     <property name="test.all.file" value="${test.src.dir}/all-tests"/>
     <property name="test.exclude.file" value="${test.src.dir}/excluded-tests"/>
-    <property name="test.exclude.file.20" value="${test.src.dir}/excluded-tests-20"/>
-    <property name="test.exclude.file.23" value="${test.src.dir}/excluded-tests-23"/>
     <property name="test.exclude.file.mr" value="${test.src.dir}/excluded-tests-mr"/>
     <property name="test.exclude.file.tez" value="${test.src.dir}/excluded-tests-tez"/>
     <property name="pigunit.jarfile" value="pigunit.jar" />
@@ -151,9 +152,8 @@
 	
     <target name="setTezEnv">
         <propertyreset name="test.timeout" value="900000" />
-        <propertyreset name="hadoopversion" value="23" />
-        <propertyreset name="isHadoop23" value="true" />
-        <propertyreset name="hbase.hadoop.version" value="hadoop2" />
+        <propertyreset name="hadoopversion" value="2" />
+        <propertyreset name="isHadoop2" value="true" />
         <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
         <propertyreset name="src.shims.test.dir" value="${basedir}/shims/test/hadoop${hadoopversion}" />
         <propertyreset name="src.exclude.dir" value="" />
@@ -201,40 +201,42 @@
     <property name="loglevel" value="quiet" />
     <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
 
-    <property name="hadoopversion" value="20" />
 
-    <condition property="isHadoop23">
+    <!--
+      Hadoop master version
+      (Value 23 is translated for backward compatibility in old build scripts)
+    -->
+    <if>
         <equals arg1="${hadoopversion}" arg2="23"/>
-    </condition>
+        <then>
+            <echo>Property setting hadoopversion=23 is deprecated. Overwriting to hadoopversion=2</echo>
+            <var name="hadoopversion" unset="true"/>
+            <property name="hadoopversion" value="2" />
+        </then>
+    </if>
+    <property name="hadoopversion" value="2" />
 
-    <condition property="hbase.hadoop.version" value="hadoop1" else="hadoop2">
-        <not>
-            <equals arg1="${hadoopversion}" arg2="23"/>
-        </not>
+    <condition property="isHadoop2">
+        <equals arg1="${hadoopversion}" arg2="2"/>
     </condition>
 
     <!--
       HBase master version
-      Denotes how the HBase dependencies are layout. Value "94" denotes older
-      format where all HBase code is present in one single jar, which is the
-      way HBase is available up to version 0.94. Value "95" denotes new format
-      where HBase is cut into multiple dependencies per each major subsystem,
-      e.g. "client", "server", ... . Only values "94" and "95" are supported
-      at the moment.
+      (Value 95 is translated for backward compatibility in old build scripts)
     -->
-    <property name="hbaseversion" value="95" />
-
-    <!-- exclude tez code if not hadoop20 -->
-    <condition property="src.exclude.dir" value="**/tez/**" else="">
-        <not>
-            <equals arg1="${hadoopversion}" arg2="23"/>
-        </not>
-    </condition>
+    <if>
+        <equals arg1="${hbaseversion}" arg2="95"/>
+        <then>
+            <echo>Property setting hbaseversion=95 is deprecated. Overwriting to hbaseversion=1</echo>
+            <var name="hbaseversion" unset="true"/>
+            <property name="hbaseversion" value="1" />
+        </then>
+    </if>
+    <property name="hbaseversion" value="1" />
 
     <property name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
     <property name="src.shims.test.dir" value="${basedir}/shims/test/hadoop${hadoopversion}" />
 
-    <property name="hadoop.jar" value="hadoop-core-${hadoop-core.version}.jar" />
     <property name="asfrepo" value="https://repository.apache.org"/>
     <property name="asfsnapshotrepo" value="${asfrepo}/content/repositories/snapshots"/>
     <property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
@@ -379,11 +381,6 @@
             <include name="joda-time-${joda-time.version}.jar"/>
             <include name="automaton-${automaton.version}.jar"/>
             <include name="jansi-${jansi.version}.jar"/>
-            <include name="jackson-mapper-asl-${jackson.version}.jar" unless="isHadoop23"/>
-            <include name="jackson-core-asl-${jackson.version}.jar" unless="isHadoop23"/>
-            <include name="guava-${guava.version}.jar" unless="isHadoop23"/>
-            <include name="snappy-java-${snappy.version}.jar" unless="isHadoop23"/>
-            <include name="asm-${asm.version}.jar" unless="isHadoop23"/>
         </patternset>
     </fileset>
 
@@ -545,6 +542,7 @@
         <echo>*** Building Main Sources ***</echo>
         <echo>*** To compile with all warnings enabled, supply -Dall.warnings=1 on command line ***</echo>
         <echo>*** Else, you will only be warned about deprecations ***</echo>
+        <echo>*** Hadoop version used: ${hadoopversion} ; HBase version used: ${hbaseversion} ***</echo>
         <compileSources sources="${src.dir};${src.gen.dir};${src.lib.dir}/bzip2;${src.shims.dir}"
             excludes="${src.exclude.dir}" dist="${build.classes}" cp="classpath" warnings="${javac.args.warnings}" />
         <copy todir="${build.classes}/META-INF">
@@ -674,31 +672,14 @@
     </target>
 
     <!-- ================================================================== -->
-    <!-- Facede to build pig.jar for both Hadoop 1 and Hadoop 2             -->
-    <!-- ================================================================== -->
-    <target name="jar-h12" description="Create pig for both Hadoop 1 and Hadoop 2">
-        <propertyreset name="hadoopversion" value="20" />
-        <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
-        <antcall target="clean" inheritRefs="true" inheritall="true"/>
-        <antcall target="jar" inheritRefs="true" inheritall="true"/>
-        <antcall target="copyHadoop1LocalRuntimeDependencies"/>
-        <delete dir="${build.dir}" />
-        <propertyreset name="hadoopversion" value="23" />
-        <propertyreset name="hbase.hadoop.version" value="hadoop2" />
-        <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
-        <propertyreset name="src.exclude.dir" value="" />
-        <antcall target="jar" inheritRefs="true" inheritall="true"/>
-    </target>
-
-    <!-- ================================================================== -->
     <!-- Make pig.jar                                                       -->
     <!-- ================================================================== -->
     <target name="jar" depends="compile,ivy-buildJar" description="Create pig core jar">
         <buildJar svnString="${svn.revision}" outputFile="${output.jarfile.core}" includedJars="core.dependencies.jar"/>
         <buildJar svnString="${svn.revision}" outputFile="${output.jarfile.withouthadoop}" includedJars="runtime.dependencies-withouthadoop.jar"/>
         <antcall target="copyCommonDependencies"/>
-        <antcall target="copyh1Dependencies"/>
         <antcall target="copyh2Dependencies"/>
+        <antcall target="copyHadoop2LocalRuntimeDependencies" />
     </target>
 
     <target name="copyCommonDependencies">
@@ -735,19 +716,7 @@
         </copy>
     </target>
 
-    <target name="copyh1Dependencies" unless="isHadoop23">
-        <mkdir dir="${lib.dir}/h1" />
-        <copy todir="${lib.dir}/h1">
-            <fileset dir="${ivy.lib.dir}" includes="avro-mapred-*.jar"/>
-            <fileset dir="${ivy.lib.dir}" includes="hive-shims-0.*.jar"/>
-            <fileset dir="${ivy.lib.dir}" includes="hbase-*hadoop1.jar"/>
-        </copy>
-        <copy file="${output.jarfile.core}" tofile="${output.jarfile.backcompat-core-h1}"/>
-        <mkdir dir="${legacy.dir}" />
-        <move file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.withouthadoop-h1}"/>
-    </target>
-
-    <target name="copyh2Dependencies" if="isHadoop23">
+    <target name="copyh2Dependencies" if="isHadoop2">
         <mkdir dir="${lib.dir}/h2" />
         <copy todir="${lib.dir}/h2">
             <fileset dir="${ivy.lib.dir}" includes="avro-mapred-*.jar"/>
@@ -761,18 +730,21 @@
         <move file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.withouthadoop-h2}"/>
     </target>
 
-    <target name="copyHadoop1LocalRuntimeDependencies">
-        <mkdir dir="${lib.dir}/hadoop1-runtime" />
-        <copy todir="${lib.dir}/hadoop1-runtime">
-            <fileset dir="${ivy.lib.dir}" includes="hadoop-core-*.jar"/>
+    <target name="copyHadoop2LocalRuntimeDependencies">
+        <mkdir dir="${lib.dir}/hadoop2-runtime" />
+        <copy todir="${lib.dir}/hadoop2-runtime">
+            <fileset dir="${ivy.lib.dir}" includes="hadoop-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-cli-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-configuration-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="commons-collections-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-lang-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-codec-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-io-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-logging-*.jar"/>
-            <fileset dir="${ivy.lib.dir}" includes="commons-httpclient-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="httpclient-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="httpcore-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="log4j-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="slf4j-*.jar"/>
         </copy>
     </target>
 
@@ -932,8 +904,6 @@
                     <patternset>
                        <includesfile name="@{test.file}"/>
                        <excludesfile name="${test.exclude.file}" if="test.exclude.file"/>
-                       <excludesfile name="${test.exclude.file.20}" unless="isHadoop23"/>
-                       <excludesfile name="${test.exclude.file.23}" if="isHadoop23"/>
                        <excludesfile name="${test.exclude.file.for.exectype}"/>
                     </patternset>
                     <exclude name="**/${exclude.testcase}.java" if="exclude.testcase" />
@@ -962,10 +932,10 @@
 
     <target name="test-core-mrtez" description="run core tests on both mr and tez mode"
             depends="setWindowsPath,setLinuxPath,compile-test,jar,debugger.check,jackson-pig-3039-test-download">
-        <fail message="hadoopversion must be set to 23 when invoking test-core-mrtez">
+        <fail message="hadoopversion must be set to 2 when invoking test-core-mrtez">
           <condition>
             <not>
-              <equals arg1="${hadoopversion}" arg2="23" />
+              <equals arg1="${hadoopversion}" arg2="2" />
             </not>
           </condition>
         </fail>
@@ -1049,10 +1019,7 @@
     <!-- ================================================================== -->
     <!-- Distribution                                                       -->
     <!-- ================================================================== -->
-    <target name="package-h12" depends="jar-h12, docs, api-report, piggybank" description="Create a Pig tar release">
-        <package-base/>
-    </target>
-	
+
     <target name="package" depends="jar, docs, api-report, piggybank" description="Create a Pig tar release">
         <package-base/>
     </target>
@@ -1072,7 +1039,6 @@
             <fileset dir="${lib.dir}"/>
         </copy>
 
-        <copy file="${output.jarfile.backcompat-core-h1}" tofile="${tar.dist.dir}/${final.name}-core-h1.jar" failonerror="false"/>
         <copy file="${output.jarfile.backcompat-core-h2}" tofile="${tar.dist.dir}/${final.name}-core-h2.jar" failonerror="false"/>
 
         <copy todir="${tar.dist.dir}/lib" file="contrib/piggybank/java/piggybank.jar"/>
@@ -1150,10 +1116,6 @@
         <tar-base/>
     </target>
 
-    <target name="tar-h12" depends="package-h12" description="Source distribution">
-        <tar-base/>
-    </target>
-
     <macrodef name="tar-base">
       <sequential>
         <tar compression="gzip" longfile="gnu" destfile="${build.dir}/${artifact.pig.tar}">
@@ -1239,15 +1201,13 @@
           uri="urn:maven-artifact-ant"
           classpathref="mvn-ant-task.classpath"/>
     </target>
-    <target name="mvn-install" depends="mvn-taskdef,jar-h12, set-version, source-jar,
-      javadoc-jar, pigunit-jar, smoketests-jar, piggybank"
+    <target name="mvn-install" depends="mvn-taskdef, mvn-build, set-version"
          description="To install pig to local filesystem's m2 cache">
          <artifact:pom file="${pig.pom}" id="pig"/>
-          <artifact:install file="${output.jarfile.core-h1}">
+          <artifact:install file="${output.jarfile.core-h2}">
                <pom refid="pig"/>
            <attach file="${output.jarfile.sources}" classifier="sources" />
            <attach file="${output.jarfile.javadoc}" classifier="javadoc" />
-           <attach file="${output.jarfile.core-h2}" classifier="h2" />
           </artifact:install>
          <artifact:pom file="${pigunit.pom}" id="pigunit"/>
           <artifact:install file="${pigunit.jarfile}">
@@ -1263,10 +1223,9 @@
          </artifact:install>
     </target>
 
-    <target name="mvn-build" depends="jar-h12, source-jar,
+    <target name="mvn-build" depends="jar, source-jar,
                                       javadoc-jar, smoketests-jar, pigunit-jar, piggybank"
          description="To build the pig jar artifacts to be deployed to apache maven repository">
-        <move file="${output.jarfile.backcompat-core-h1}" tofile="${output.jarfile.core}"/>
         <move file="${output.jarfile.backcompat-core-h2}" tofile="${output.jarfile.core-h2}"/>
     </target>
 
@@ -1657,7 +1616,9 @@
 
      <target name="ivy-resolve" depends="ivy-init" unless="ivy.resolved" description="Resolve Ivy dependencies">
        <property name="ivy.resolved" value="true"/>
+       <echo>*** Ivy resolve with Hadoop ${hadoopversion} and HBase ${hbaseversion} ***</echo>
        <ivy:resolve log="${loglevel}" settingsRef="${ant.project.name}.ivy.settings" conf="compile"/>
+       <ivy:report toDir="build/ivy/report"/>
      </target>
 
      <target name="ivy-compile" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for compile configuration">

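The ant-contrib <if> blocks introduced above keep old invocations working: hadoopversion=23 (and hbaseversion=95) are translated rather than rejected. A sketch of two now-equivalent builds:

    # Both target Hadoop 2; the first additionally prints a deprecation echo
    # before the property is overwritten to 2.
    ant -Dhadoopversion=23 clean jar
    ant clean jar
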
Modified: pig/trunk/contrib/piggybank/java/build.xml
URL: http://svn.apache.org/viewvc/pig/trunk/contrib/piggybank/java/build.xml?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/contrib/piggybank/java/build.xml (original)
+++ pig/trunk/contrib/piggybank/java/build.xml Sat Jan  7 00:11:37 2017
@@ -16,6 +16,13 @@
 -->
 
 <project basedir="." default="jar" name="pigudf">
+
+    <taskdef resource="net/sf/antcontrib/antcontrib.properties">
+        <classpath>
+            <pathelement location="../../../ivy/ant-contrib-1.0b3.jar"/>
+        </classpath>
+    </taskdef>
+
     <property file="../../../build.properties" />
     <!-- javac properties -->
     <property name="javac.debug" value="on" />
@@ -38,16 +45,30 @@
     <property name="src.dir" value="src/main/java/org/apache/pig/piggybank" />
     <property name="hsqldb.jar" value="../../../build/ivy/lib/Pig/hsqldb-1.8.0.10.jar"/>
 
-    <!-- JobHistoryLoader currently does not support 0.23 -->
-    <condition property="build.classes.excludes" value="**/HadoopJobHistoryLoader.java" else="">
+    <!--
+      Hadoop master version
+      (Value 23 is translated for backward compatibility in old build scripts)
+    -->
+    <if>
         <equals arg1="${hadoopversion}" arg2="23"/>
+        <then>
+            <echo>Property setting hadoopversion=23 is deprecated. Overwriting to hadoopversion=2</echo>
+            <var name="hadoopversion" unset="true"/>
+            <property name="hadoopversion" value="2" />
+        </then>
+    </if>
+    <property name="hadoopversion" value="2" />
+
+    <!-- JobHistoryLoader currently does not support 2 -->
+    <condition property="build.classes.excludes" value="**/HadoopJobHistoryLoader.java" else="">
+        <equals arg1="${hadoopversion}" arg2="2"/>
     </condition>
     <condition property="test.classes.excludes" value="**/TestHadoopJobHistoryLoader.java" else="">
-        <equals arg1="${hadoopversion}" arg2="23"/>
+        <equals arg1="${hadoopversion}" arg2="2"/>
     </condition>
 
-    <condition property="hadoopsuffix" value="2" else="1">
-        <equals arg1="${hadoopversion}" arg2="23"/>
+    <condition property="hadoopsuffix" value="2" else="">
+        <equals arg1="${hadoopversion}" arg2="2"/>
     </condition>
 
     <!-- jar properties -->

Modified: pig/trunk/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/IndexedStorage.java
URL: http://svn.apache.org/viewvc/pig/trunk/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/IndexedStorage.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/IndexedStorage.java (original)
+++ pig/trunk/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/IndexedStorage.java Sat Jan  7 00:11:37 2017
@@ -60,7 +60,6 @@ import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.util.StorageUtil;
 import org.apache.pig.data.DataType;
 import org.apache.pig.data.DataByteArray;
-import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;
 
 /**
  * <code>IndexedStorage</code> is a form of <code>PigStorage</code> that supports a

Modified: pig/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/pig/trunk/ivy.xml?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/ivy.xml (original)
+++ pig/trunk/ivy.xml Sat Jan  7 00:11:37 2017
@@ -38,10 +38,8 @@
     <conf name="jdiff" visibility="private"/>
     <conf name="checkstyle" visibility="private"/>
     <conf name="buildJar" extends="compile,test" visibility="private"/>
-    <conf name="hadoop20" visibility="private"/>
-    <conf name="hadoop23" visibility="private"/>
-    <conf name="hbase94" visibility="private"/>
-    <conf name="hbase95" visibility="private"/>
+    <conf name="hadoop2" visibility="private"/>
+    <conf name="hbase1" visibility="private"/>
   </configurations>
   <publications>
     <artifact name="pig" conf="master"/>
@@ -60,17 +58,17 @@
     <dependency org="commons-beanutils" name="commons-beanutils-core" rev="${commons-beanutils.version}"
       conf="checkstyle->master"/>
     <dependency org="xmlenc" name="xmlenc" rev="${xmlenc.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.sun.jersey" name="jersey-bundle" rev="${jersey.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.sun.jersey" name="jersey-server" rev="${jersey.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.sun.jersey.contribs" name="jersey-guice" rev="${jersey.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="commons-codec" name="commons-codec" rev="${commons-codec.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="commons-httpclient" name="commons-httpclient" rev="${commons-httpclient.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="commons-el" name="commons-el" rev="${commons-el.version}"
       conf="compile->master"/>
     <dependency org="commons-io" name="commons-io" rev="${commons-io.version}"
@@ -88,92 +86,86 @@
     <dependency org="nl.basjes.parse" name="parser-core" rev="${basjes-httpdlog-pigloader.version}"
       conf="compile->master"/>
     <dependency org="commons-configuration" name="commons-configuration" rev="${commons-configuration.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="commons-collections" name="commons-collections" rev="${commons-collections.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="javax.servlet" name="servlet-api" rev="${servlet-api.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="javax.ws.rs" name="jsr311-api" rev="${jsr311-api.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.google.protobuf" name="protobuf-java" rev="${protobuf-java.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="javax.inject" name="javax.inject" rev="${javax-inject.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="javax.xml.bind" name="jaxb-api" rev="${jaxb-api.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.sun.xml.bind" name="jaxb-impl" rev="${jaxb-impl.version}"
-      conf="hadoop23->master"/> 
+      conf="hadoop2->master"/>
     <dependency org="com.google.inject" name="guice" rev="${guice.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="com.google.inject.extensions" name="guice-servlet" rev="${guice-servlet.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="aopalliance" name="aopalliance" rev="${aopalliance.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.mortbay.jetty" name="jsp-2.1" rev="${jasper.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.mortbay.jetty" name="jsp-api-2.1" rev="${jasper.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="log4j" name="log4j" rev="${log4j.version}"
       conf="compile->master"/>
-    <dependency org="com.sun.jersey" name="jersey-core" rev="${jersey-core.version}"
-      conf="hadoop20->default"/>
-    <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop-core.version}"
-      conf="hadoop20->default"/>
-    <dependency org="org.apache.hadoop" name="hadoop-test" rev="${hadoop-test.version}"
-      conf="hadoop20->default"/>
-    <dependency org="org.apache.hadoop" name="hadoop-annotations" 
-      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+    <dependency org="org.apache.hadoop" name="hadoop-annotations"
+      rev="${hadoop-common.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-auth" 
-      rev="${hadoop-common.version}" conf="hadoop23->master"/>
+      rev="${hadoop-common.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-common" 
-      rev="${hadoop-common.version}" conf="hadoop23->master">
+      rev="${hadoop-common.version}" conf="hadoop2->master">
       <artifact name="hadoop-common" ext="jar" />
       <artifact name="hadoop-common" type="tests" ext="jar" m:classifier="tests" />
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-hdfs"
-      rev="${hadoop-hdfs.version}" conf="hadoop23->master">
+      rev="${hadoop-hdfs.version}" conf="hadoop2->master">
       <artifact name="hadoop-hdfs" ext="jar" />
       <artifact name="hadoop-hdfs" type="tests" ext="jar" m:classifier="tests" />
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-core" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master">
+      conf="hadoop2->master">
         <artifact name="hadoop-mapreduce-client-jobclient" ext="jar" />
         <artifact name="hadoop-mapreduce-client-jobclient" type="tests" ext="jar" m:classifier="tests"/>
         <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
         <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master">
+      conf="hadoop2->master">
       <artifact name="hadoop-yarn-server-tests" type="jar" m:classifier="tests"/>
     </dependency>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master" />
+      conf="hadoop2->master" />
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-shuffle" rev="${hadoop-mapreduce.version}"
-      conf="hadoop23->master" />
+      conf="hadoop2->master" />
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-common" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-api" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-common" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-web-proxy" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-common" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-nodemanager" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-resourcemanager" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-client" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-yarn-server-applicationhistoryservice" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs" 
-      rev="${hadoop-mapreduce.version}" conf="hadoop23->master"/>
+      rev="${hadoop-mapreduce.version}" conf="hadoop2->master"/>
     <dependency org="org.mortbay.jetty" name="jetty" rev="${jetty.version}"
       conf="compile->master">
       <artifact name="jetty" ext="jar" />
@@ -192,13 +184,7 @@
       <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/>
     </dependency>
     <dependency org="org.apache.avro" name="avro-mapred" rev="${avro.version}"
-      conf="hadoop20->default;checkstyle->master">
-      <exclude org="org.codehaus.jackson" module="jackson-core-asl"/>
-      <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/>
-      <exclude org="io.netty" module="netty"/>
-    </dependency>
-    <dependency org="org.apache.avro" name="avro-mapred" rev="${avro.version}"
-      conf="hadoop23->default;checkstyle->master">
+      conf="hadoop2->default;checkstyle->master">
       <artifact name="avro-mapred" type="jar" m:classifier="hadoop2"/>
       <exclude org="org.codehaus.jackson" module="jackson-core-asl"/>
       <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/>
@@ -260,37 +246,14 @@
     <dependency org="org.antlr" name="ST4" rev="${stringtemplate.version}" conf="compile->default"/>
     <dependency org="org.apache.zookeeper" name="zookeeper" rev="${zookeeper.version}" conf="compile->master"/>
     <dependency org="io.netty" name="netty" rev="${netty.version}" conf="test->master"/>
+    <dependency org="io.netty" name="netty-all" rev="${netty-all.version}" conf="test->master" />
     <dependency org="dk.brics.automaton" name="automaton" rev="1.11-8" conf="compile->default"/>
 
     <dependency org="org.jruby" name="jruby-complete" rev="${jruby.version}" conf="compile->master"/>
     <dependency org="asm" name="asm" rev="${asm.version}" conf="compile->default"/>
 
-    <!-- HBase dependency in format for releases up to 0.94 (including) -->
-    <dependency org="org.apache.hbase" name="hbase" rev="${hbase94.version}" conf="hbase94->master">
-      <artifact name="hbase" type="jar"/>
-      <artifact name="hbase" type="test-jar" ext="jar" m:classifier="tests"/>
-      <exclude org="org.apache.thrift" module="thrift"/>
-      <exclude org="org.apache.hadoop" module="hadoop-core"/>
-      <exclude org="org.apache.ant" module="ant" />
-      <exclude org="org.slf4j" module="slf4j"/>
-      <exclude org="org.slf4j" module="slf4j-api"/>
-      <exclude org="org.slf4j" module="slf4j-log4j12" />
-      <exclude org="org.slf4j" module="log4j12"/>
-      <exclude org="org.slf4j" module="log4j-over-slf4j"/>
-      <exclude org="stax" module="stax-api" />
-      <exclude org="javax.xml.bind" module="jaxb-api" />
-      <exclude org="javax.ws.rs" module="jsr311-api" />
-      <exclude org="tomcat" module="jasper-runtime"/>
-      <exclude org="tomcat" module="jasper-compiler"/>
-      <exclude org="com.google.protobuf" module="protobuf-java"/>
-      <exclude org="com.sun.jersey" module="jersey-core"/>
-      <exclude org="com.sun.jersey" module="jersey-server"/>
-      <exclude org="com.sun.jersey" module="jersey-json"/>
-      <exclude org="asm" module="asm"/>
-    </dependency>
-
     <!-- HBase dependency in format for releases higher or equal to 0.95 -->
-    <dependency org="org.apache.hbase" name="hbase-client" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-client" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-client" type="jar"/>
       <artifact name="hbase-client" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="org.slf4j" module="slf4j-api"/>
@@ -306,7 +269,7 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-common" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-common" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-common" type="jar"/>
       <artifact name="hbase-common" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="org.apache.hadoop" module="hadoop-core"/>
@@ -321,7 +284,7 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-server" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-server" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-server" type="jar"/>
       <artifact name="hbase-server" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="org.apache.hadoop" module="hadoop-core"/>
@@ -338,20 +301,20 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-protocol" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-protocol" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-protocol" type="jar"/>
       <artifact name="hbase-protocol" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="com.google.protobuf" module="protobuf-java"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-hadoop-compat" rev="${hbase95.version}" conf="hbase95->master">
+    <dependency org="org.apache.hbase" name="hbase-hadoop-compat" rev="${hbase1.version}" conf="hbase1->master">
       <artifact name="hbase-hadoop-compat" type="jar"/>
       <artifact name="hbase-hadoop-compat" type="test-jar" ext="jar" m:classifier="tests"/>
     </dependency>
 
-    <dependency org="org.apache.hbase" name="hbase-${hbase.hadoop.version}-compat" rev="${hbase95.version}" conf="hbase95->master">
-      <artifact name="hbase-${hbase.hadoop.version}-compat" type="jar"/>
-      <artifact name="hbase-${hbase.hadoop.version}-compat" type="test-jar" ext="jar" m:classifier="tests"/>
+    <dependency org="org.apache.hbase" name="hbase-hadoop2-compat" rev="${hbase1.version}" conf="hbase1->master">
+      <artifact name="hbase-hadoop2-compat" type="jar"/>
+      <artifact name="hbase-hadoop2-compat" type="test-jar" ext="jar" m:classifier="tests"/>
       <exclude org="org.apache.hadoop" module="hadoop-core"/>
       <exclude org="org.slf4j" module="slf4j-api"/>
       <exclude org="stax" module="stax-api" />
@@ -364,14 +327,14 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.htrace" name="htrace-core" rev="3.0.4" conf="hadoop23->master"/>
-    <dependency org="org.apache.htrace" name="htrace-core" rev="${htrace.version}" conf="hadoop23->master"/>
+    <dependency org="org.htrace" name="htrace-core" rev="3.0.4" conf="hadoop2->master"/>
+    <dependency org="org.apache.htrace" name="htrace-core" rev="${htrace.version}" conf="hadoop2->master"/>
     <dependency org="org.fusesource.leveldbjni" name="leveldbjni-all" rev="${leveldbjni.version}"
-      conf="hadoop23->master"/>
-    <dependency org="org.cloudera.htrace" name="htrace-core" rev="2.00" conf="hbase95->master">
+      conf="hadoop2->master"/>
+    <dependency org="org.cloudera.htrace" name="htrace-core" rev="2.00" conf="hbase1->master">
       <artifact name="htrace-core" type="jar"/>
     </dependency>
-    <dependency org="com.lmax" name="disruptor" rev="3.3.0" conf="hbase95->master"/>
+    <dependency org="com.lmax" name="disruptor" rev="3.3.0" conf="hbase1->master"/>
 
     <!-- for TestHBaseStorage -->
     <dependency org="com.github.stephenc.high-scale-lib" name="high-scale-lib" rev="${high-scale-lib.version}"
@@ -430,9 +393,7 @@
     <dependency org="org.apache.hive" name="hive-contrib" rev="${hive.version}" changing="true"
                 conf="test->master" />
     <dependency org="org.apache.hive.shims" name="hive-shims-0.23" rev="${hive.version}" changing="true"
-      conf="hadoop23->master" />
-    <dependency org="org.apache.hive.shims" name="hive-shims-0.20S" rev="${hive.version}" changing="true"
-      conf="hadoop20->master" />
+      conf="hadoop2->master" />
     <dependency org="org.iq80.snappy" name="snappy" rev="${snappy.version}"
       conf="test->master" />
     <dependency org="com.esotericsoftware.kryo" name="kryo" rev="${kryo.version}"
@@ -450,31 +411,31 @@
 
     <!-- for Tez integration -->
     <dependency org="org.apache.tez" name="tez" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-common" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-api" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-dag" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-runtime-internals" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-runtime-library" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-mapreduce" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.tez" name="tez-yarn-timeline-history-with-acls" rev="${tez.version}"
-       conf="hadoop23->master"/>
+       conf="hadoop2->master"/>
     <dependency org="org.apache.commons" name="commons-collections4" rev="${commons-collections4.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.codehaus.jettison" name="jettison" rev="${jettison.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.apache.commons" name="commons-math3" rev="${commons-math3.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.apache.curator" name="curator-framework" rev="${curator.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
     <dependency org="org.apache.curator" name="curator-client" rev="${curator.version}"
-      conf="hadoop23->master"/>
+      conf="hadoop2->master"/>
   </dependencies>
 </ivy-module>
 

Added: pig/trunk/ivy/ant-contrib-1.0b3.jar
URL: http://svn.apache.org/viewvc/pig/trunk/ivy/ant-contrib-1.0b3.jar?rev=1777738&view=auto
==============================================================================
Binary file - no diff available.

Propchange: pig/trunk/ivy/ant-contrib-1.0b3.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: pig/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/pig/trunk/ivy/libraries.properties?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/ivy/libraries.properties (original)
+++ pig/trunk/ivy/libraries.properties Sat Jan  7 00:11:37 2017
@@ -39,14 +39,10 @@ ivy.version=2.2.0
 jasper.version=6.1.14
 groovy.version=2.4.5
 guava.version=11.0
-jersey-core.version=1.8
-hadoop-core.version=1.0.4
-hadoop-test.version=1.0.4
-hadoop-common.version=2.6.0
-hadoop-hdfs.version=2.6.0
-hadoop-mapreduce.version=2.6.0
-hbase94.version=0.94.1
-hbase95.version=0.98.12-${hbase.hadoop.version}
+hadoop-common.version=2.7.3
+hadoop-hdfs.version=2.7.3
+hadoop-mapreduce.version=2.7.3
+hbase1.version=0.98.12-hadoop2
 hsqldb.version=1.8.0.10
 hive.version=1.2.1
 httpcomponents.version=4.1
@@ -73,6 +69,7 @@ antlr.version=3.4
 stringtemplate.version=4.0.4
 log4j.version=1.2.16
 netty.version=3.6.6.Final
+netty-all.version=4.0.23.Final
 rats-lib.version=0.5.1
 slf4j-api.version=1.6.1
 slf4j-log4j12.version=1.6.1

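The Hadoop artifact versions are now pinned in one place, defaulting to 2.7.3. Since they are ordinary Ant properties loaded from libraries.properties, a command-line -D takes precedence; a sketch using 2.6.0 as an arbitrary example:

    # Resolve and build against a different Hadoop 2.x line (example versions).
    ant -Dhadoop-common.version=2.6.0 -Dhadoop-hdfs.version=2.6.0 \
        -Dhadoop-mapreduce.version=2.6.0 clean jar
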
Added: pig/trunk/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/pig/trunk/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java?rev=1777738&view=auto
==============================================================================
--- pig/trunk/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java (added)
+++ pig/trunk/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java Sat Jan  7 00:11:37 2017
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.backend.hadoop.executionengine.shims;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.ContextFactory;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.task.JobContextImpl;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+
+public class HadoopShims {
+
+    private static Log LOG = LogFactory.getLog(HadoopShims.class);
+
+    static public JobContext cloneJobContext(JobContext original) throws IOException, InterruptedException {
+        JobContext newContext = ContextFactory.cloneContext(original,
+                new JobConf(original.getConfiguration()));
+        return newContext;
+    }
+
+    static public TaskAttemptContext createTaskAttemptContext(Configuration conf,
+            TaskAttemptID taskId) {
+        if (conf instanceof JobConf) {
+            return new TaskAttemptContextImpl(new JobConf(conf), taskId);
+        } else {
+            return new TaskAttemptContextImpl(conf, taskId);
+        }
+    }
+
+    static public JobContext createJobContext(Configuration conf,
+            JobID jobId) {
+        if (conf instanceof JobConf) {
+            return new JobContextImpl(new JobConf(conf), jobId);
+        } else {
+            return new JobContextImpl(conf, jobId);
+        }
+    }
+
+    static public boolean isMap(TaskAttemptID taskAttemptID) {
+        return taskAttemptID.getTaskType() == TaskType.MAP;
+    }
+
+    static public TaskAttemptID getNewTaskAttemptID() {
+        return new TaskAttemptID("", 1, TaskType.MAP, 1, 1);
+    }
+
+    static public TaskAttemptID createTaskAttemptID(String jtIdentifier, int jobId, boolean isMap,
+            int taskId, int id) {
+        if (isMap) {
+            return new TaskAttemptID(jtIdentifier, jobId, TaskType.MAP, taskId, id);
+        } else {
+            return new TaskAttemptID(jtIdentifier, jobId, TaskType.REDUCE, taskId, id);
+        }
+    }
+
+    /**
+     * Returns whether the given path has a FileSystem implementation.
+     *
+     * @param path the path to check
+     * @param conf the configuration used to resolve the path's scheme
+     * @return true if the given path's scheme has a FileSystem implementation,
+     *         false otherwise
+     */
+    public static boolean hasFileSystemImpl(Path path, Configuration conf) {
+        String scheme = path.toUri().getScheme();
+        if (scheme != null) {
+            // Hadoop 0.23
+            if (conf.get("fs.file.impl") != null) {
+                String fsImpl = conf.get("fs." + scheme + ".impl");
+                if (fsImpl == null) {
+                    return false;
+                }
+            } else {
+                try {
+                    return FileSystem.getFileSystemClass(scheme, conf) != null;
+                } catch (Exception e) {
+                    return false;
+                }
+            }
+        }
+        return true;
+    }
+}

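For reference, the consolidated shim is driven the same way the per-version shims were. A minimal standalone sketch (the class name and job identifiers below are hypothetical, for illustration only):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;

    public class ShimUsageSketch {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            // On Hadoop 2 these delegate straight to JobContextImpl /
            // TaskAttemptContextImpl instead of version-specific reflection.
            JobContext jobContext = HadoopShims.createJobContext(conf, new JobID("sketch", 1));
            TaskAttemptID attemptId = HadoopShims.getNewTaskAttemptID();
            TaskAttemptContext taskContext = HadoopShims.createTaskAttemptContext(conf, attemptId);
            System.out.println(jobContext.getJobID() + " -> " + taskContext.getTaskAttemptID()
                    + ", map=" + HadoopShims.isMap(attemptId));
        }
    }
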
Modified: pig/trunk/src/docs/src/documentation/content/xdocs/start.xml
URL: http://svn.apache.org/viewvc/pig/trunk/src/docs/src/documentation/content/xdocs/start.xml?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/docs/src/documentation/content/xdocs/start.xml (original)
+++ pig/trunk/src/docs/src/documentation/content/xdocs/start.xml Sat Jan  7 00:11:37 2017
@@ -34,7 +34,7 @@
  <p><strong>Mandatory</strong></p>
       <p>Unix and Windows users need the following:</p>
 		<ul>
-		  <li> <strong>Hadoop 0.23.X, 1.X or 2.X</strong> - <a href="http://hadoop.apache.org/common/releases.html">http://hadoop.apache.org/common/releases.html</a> (You can run Pig with different versions of Hadoop by setting HADOOP_HOME to point to the directory where you have installed Hadoop. If you do not set HADOOP_HOME, by default Pig will run with the embedded version, currently Hadoop 1.0.4.)</li>
+		  <li> <strong>Hadoop 2.X</strong> - <a href="http://hadoop.apache.org/common/releases.html">http://hadoop.apache.org/common/releases.html</a> (You can run Pig with different versions of Hadoop by setting HADOOP_HOME to point to the directory where you have installed Hadoop. If you do not set HADOOP_HOME, by default Pig will run with the embedded version, currently Hadoop 2.7.3.)</li>
 		  <li> <strong>Java 1.7</strong> - <a href="http://java.sun.com/javase/downloads/index.jsp">http://java.sun.com/javase/downloads/index.jsp</a> (set JAVA_HOME to the root of your Java installation)</li>	
 		</ul>
 		<p></p>
@@ -82,7 +82,6 @@ Test the Pig installation with this simp
 	  <li> Build the code from the top directory: <code>ant</code> <br></br>
 	  If the build is successful, you should see the pig.jar file created in that directory. </li>	
	  <li> Validate the pig.jar by running a unit test: <code>ant test</code></li>
-	  <li> If you are using Hadoop 0.23.X or 2.X, please add -Dhadoopversion=23 in your ant command line in the previous steps</li>
      </ol>
  </section>
 </section>

Modified: pig/trunk/src/docs/src/documentation/content/xdocs/tabs.xml
URL: http://svn.apache.org/viewvc/pig/trunk/src/docs/src/documentation/content/xdocs/tabs.xml?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/docs/src/documentation/content/xdocs/tabs.xml (original)
+++ pig/trunk/src/docs/src/documentation/content/xdocs/tabs.xml Sat Jan  7 00:11:37 2017
@@ -32,6 +32,6 @@
   -->
   <tab label="Project" href="http://hadoop.apache.org/pig/" type="visible" /> 
   <tab label="Wiki" href="http://wiki.apache.org/pig/" type="visible" /> 
-  <tab label="Pig 0.16.0 Documentation" dir="" type="visible" /> 
+  <tab label="Pig 0.17.0 Documentation" dir="" type="visible" />
 
 </tabs>

Copied: pig/trunk/src/org/apache/pig/backend/hadoop/PigJobControl.java (from r1777730, pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop23/PigJobControl.java)
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/PigJobControl.java?p2=pig/trunk/src/org/apache/pig/backend/hadoop/PigJobControl.java&p1=pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop23/PigJobControl.java&r1=1777730&r2=1777738&rev=1777738&view=diff
==============================================================================
--- pig/trunk/shims/src/hadoop23/org/apache/pig/backend/hadoop23/PigJobControl.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/PigJobControl.java Sat Jan  7 00:11:37 2017
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.pig.backend.hadoop23;
+package org.apache.pig.backend.hadoop;
 
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;

Modified: pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/AbstractAccumuloStorage.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/AbstractAccumuloStorage.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/AbstractAccumuloStorage.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/AbstractAccumuloStorage.java Sat Jan  7 00:11:37 2017
@@ -17,8 +17,6 @@
 package org.apache.pig.backend.hadoop.accumulo;
 
 import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.util.Collection;
@@ -303,24 +301,8 @@ public abstract class AbstractAccumuloSt
      */
     protected void simpleUnset(Configuration conf,
             Map<String, String> entriesToUnset) {
-        try {
-            Method unset = conf.getClass().getMethod("unset", String.class);
-
-            for (String key : entriesToUnset.keySet()) {
-                unset.invoke(conf, key);
-            }
-        } catch (NoSuchMethodException e) {
-            log.error("Could not invoke Configuration.unset method", e);
-            throw new RuntimeException(e);
-        } catch (IllegalAccessException e) {
-            log.error("Could not invoke Configuration.unset method", e);
-            throw new RuntimeException(e);
-        } catch (IllegalArgumentException e) {
-            log.error("Could not invoke Configuration.unset method", e);
-            throw new RuntimeException(e);
-        } catch (InvocationTargetException e) {
-            log.error("Could not invoke Configuration.unset method", e);
-            throw new RuntimeException(e);
+        for (String key : entriesToUnset.keySet()) {
+            conf.unset(key);
         }
     }
 

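The reflective lookup was only needed while Hadoop 1.x, whose Configuration lacked unset, was still supported; the NoSuchMethodException branch above existed for exactly that case. A minimal standalone sketch of the direct call (the key below is hypothetical):

    import org.apache.hadoop.conf.Configuration;

    public class UnsetSketch {
        public static void main(String[] args) {
            Configuration conf = new Configuration(false);
            conf.set("example.key", "value");            // hypothetical key
            conf.unset("example.key");                   // direct API on Hadoop 2
            System.out.println(conf.get("example.key")); // prints null
        }
    }
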
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/Utils.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/Utils.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/Utils.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/accumulo/Utils.java Sat Jan  7 00:11:37 2017
@@ -22,8 +22,6 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.net.URL;
 import java.net.URLDecoder;
 import java.text.MessageFormat;
@@ -42,6 +40,7 @@ import java.util.zip.ZipOutputStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.JarFinder;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.Logger;
 
@@ -112,7 +111,7 @@ public class Utils {
         // attempt to locate an existing jar for the class.
         String jar = findContainingJar(my_class, packagedClasses);
         if (null == jar || jar.isEmpty()) {
-            jar = getJar(my_class);
+            jar = JarFinder.getJar(my_class);
             updateMap(jar, packagedClasses);
         }
 
@@ -200,41 +199,6 @@ public class Utils {
     }
 
     /**
-     * Invoke 'getJar' on a JarFinder implementation. Useful for some job
-     * configuration contexts (HBASE-8140) and also for testing on MRv2. First
-     * check if we have HADOOP-9426. Lacking that, fall back to the backport.
-     * 
-     * @param my_class
-     *            the class to find.
-     * @return a jar file that contains the class, or null.
-     */
-    private static String getJar(Class<?> my_class) {
-        String ret = null;
-        String hadoopJarFinder = "org.apache.hadoop.util.JarFinder";
-        Class<?> jarFinder = null;
-        try {
-            log.debug("Looking for " + hadoopJarFinder + ".");
-            jarFinder = Class.forName(hadoopJarFinder);
-            log.debug(hadoopJarFinder + " found.");
-            Method getJar = jarFinder.getMethod("getJar", Class.class);
-            ret = (String) getJar.invoke(null, my_class);
-        } catch (ClassNotFoundException e) {
-            log.debug("Using backported JarFinder.");
-            ret = jarFinderGetJar(my_class);
-        } catch (InvocationTargetException e) {
-            // function was properly called, but threw it's own exception.
-            // Unwrap it
-            // and pass it on.
-            throw new RuntimeException(e.getCause());
-        } catch (Exception e) {
-            // toss all other exceptions, related to reflection failure
-            throw new RuntimeException("getJar invocation failed.", e);
-        }
-
-        return ret;
-    }
-
-    /**
      * Returns the full path to the Jar containing the class. It always returns a
      * JAR.
      * 

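JarFinder ships in hadoop-common on every supported Hadoop 2 release, so the Class.forName probe and backport fallback above can go. A minimal sketch of locating a class's containing jar (standalone, class name hypothetical):

    import org.apache.hadoop.util.JarFinder;

    public class JarFinderSketch {
        public static void main(String[] args) throws Exception {
            // Returns a jar containing the class; if the class was loaded
            // from a directory, JarFinder packages one on the fly.
            String jar = JarFinder.getJar(org.apache.hadoop.conf.Configuration.class);
            System.out.println("Found jar: " + jar);
        }
    }
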
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java Sat Jan  7 00:11:37 2017
@@ -29,7 +29,6 @@ import org.apache.pig.ExecType;
 import org.apache.pig.PigConstants;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapReduce;
-import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;
 import org.apache.pig.backend.hadoop.executionengine.util.MapRedUtil;
 
 public class ConfigurationUtil {
@@ -89,7 +88,7 @@ public class ConfigurationUtil {
             // so build/classes/hadoop-site.xml contains such an entry. This prevents some tests from
             // succeeding (they expect those files in hdfs), so we need to unset it in hadoop 23.
             // This should go away once MiniMRCluster fixes the distributed cache issue.
-            HadoopShims.unsetConf(localConf, MRConfiguration.JOB_CACHE_FILES);
+            localConf.unset(MRConfiguration.JOB_CACHE_FILES);
         }
         localConf.set(MapRedUtil.FILE_SYSTEM_NAME, "file:///");
         Properties props = ConfigurationUtil.toProperties(localConf);

Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/Launcher.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/Launcher.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/Launcher.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/Launcher.java Sat Jan  7 00:11:37 2017
@@ -32,7 +32,8 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.TaskReport;
+import org.apache.hadoop.mapred.TIPStatus;
+import org.apache.hadoop.mapreduce.TaskReport;
 import org.apache.hadoop.mapred.jobcontrol.Job;
 import org.apache.hadoop.mapred.jobcontrol.JobControl;
 import org.apache.pig.FuncSpec;
@@ -40,7 +41,6 @@ import org.apache.pig.PigException;
 import org.apache.pig.backend.BackendException;
 import org.apache.pig.backend.executionengine.ExecException;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
-import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;
 import org.apache.pig.impl.PigContext;
 import org.apache.pig.impl.io.FileSpec;
 import org.apache.pig.impl.plan.PlanException;
@@ -177,7 +177,7 @@ public abstract class Launcher {
             String exceptionCreateFailMsg = null;
             boolean jobFailed = false;
             if (msgs.length > 0) {
-                if (HadoopShims.isJobFailed(report)) {
+                if (report.getCurrentStatus() == TIPStatus.FAILED) {
                     jobFailed = true;
                 }
                 Set<String> errorMessageSet = new HashSet<String>();
@@ -259,11 +259,30 @@ public abstract class Launcher {
 
         List<Job> runnJobs = jc.getRunningJobs();
         for (Job j : runnJobs) {
-            prog += HadoopShims.progressOfRunningJob(j);
+            prog += progressOfRunningJob(j);
         }
         return prog;
     }
 
+    /**
+     * Returns the progress of a Job j which is part of a submitted JobControl
+     * object. The progress is for this Job alone, so it has to be scaled down
+     * by the number of jobs present in the JobControl.
+     *
+     * @param j the Job for which progress is required
+     * @return the fractional progress of this Job, averaged over its map and
+     *         reduce phases
+     * @throws IOException
+     */
+    private static double progressOfRunningJob(Job j)
+            throws IOException {
+        org.apache.hadoop.mapreduce.Job mrJob = j.getJob();
+        try {
+            return (mrJob.mapProgress() + mrJob.reduceProgress()) / 2;
+        } catch (Exception ir) {
+            return 0;
+        }
+    }
+
     public long getTotalHadoopTimeSpent() {
         return totalHadoopTimeSpent;
     }

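With the shim gone, failure detection reads the TIPStatus straight off the mapreduce-API TaskReport, as the hunk above shows. A minimal sketch of the same check over a job's map tasks (helper class and method names hypothetical):

    import org.apache.hadoop.mapred.TIPStatus;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.TaskReport;
    import org.apache.hadoop.mapreduce.TaskType;

    public class TaskFailureSketch {
        // True if any map task of the job has failed.
        static boolean anyMapTaskFailed(Job job) throws Exception {
            for (TaskReport report : job.getTaskReports(TaskType.MAP)) {
                if (report.getCurrentStatus() == TIPStatus.FAILED) {
                    return true;
                }
            }
            return false;
        }
    }
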
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchLauncher.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchLauncher.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchLauncher.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchLauncher.java Sat Jan  7 00:11:37 2017
@@ -25,6 +25,7 @@ import org.apache.hadoop.mapreduce.TaskA
 import org.apache.pig.PigException;
 import org.apache.pig.backend.executionengine.ExecException;
 import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigHadoopLogger;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapReduce;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.UDFFinishVisitor;
@@ -122,7 +123,8 @@ public class FetchLauncher {
         poStore.setUp();
 
         TaskAttemptID taskAttemptID = HadoopShims.getNewTaskAttemptID();
-        HadoopShims.setTaskAttemptId(conf, taskAttemptID);
+        // Fetch mode needs to set the task id explicitly; this is otherwise done by Hadoop
+        conf.setInt(MRConfiguration.JOB_APPLICATION_ATTEMPT_ID, taskAttemptID.getId());
 
         if (!PlanHelper.getPhysicalOperators(pp, POStream.class).isEmpty()) {
             MapRedUtil.setupStreamingDirsConfSingle(poStore, pigContext, conf);

Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchPOStoreImpl.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchPOStoreImpl.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchPOStoreImpl.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchPOStoreImpl.java Sat Jan  7 00:11:37 2017
@@ -95,7 +95,7 @@ public class FetchPOStoreImpl extends PO
         }
         if (outputCommitter.needsTaskCommit(context))
             outputCommitter.commitTask(context);
-        HadoopShims.commitOrCleanup(outputCommitter, context);
+        outputCommitter.commitJob(context);
     }
 
     @Override
@@ -109,7 +109,7 @@ public class FetchPOStoreImpl extends PO
             }
             writer = null;
         }
-        HadoopShims.commitOrCleanup(outputCommitter, context);
+        outputCommitter.commitJob(context);
     }
 
 }

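With Hadoop 1.x gone, the committer is driven through the stock mapreduce OutputCommitter API. A minimal sketch of the task-then-job commit sequence used above (class and method names hypothetical):

    import org.apache.hadoop.mapreduce.OutputCommitter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    public class CommitSketch {
        // Mirrors the fetch-mode teardown: commit the task if required, then
        // finalize the job through the same committer. A TaskAttemptContext
        // is also a JobContext, so one context serves for both calls.
        static void commitAll(OutputCommitter committer, TaskAttemptContext context)
                throws Exception {
            if (committer.needsTaskCommit(context)) {
                committer.commitTask(context);
            }
            committer.commitJob(context);
        }
    }
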
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java Sat Jan  7 00:11:37 2017
@@ -24,7 +24,6 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.lang.reflect.Method;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
@@ -61,6 +60,7 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobPriority;
 import org.apache.hadoop.mapred.jobcontrol.Job;
 import org.apache.hadoop.mapred.jobcontrol.JobControl;
+import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
 import org.apache.pig.ComparisonFunc;
 import org.apache.pig.ExecType;
 import org.apache.pig.FuncSpec;
@@ -71,6 +71,7 @@ import org.apache.pig.PigException;
 import org.apache.pig.StoreFuncInterface;
 import org.apache.pig.backend.executionengine.ExecException;
 import org.apache.pig.backend.hadoop.HDataType;
+import org.apache.pig.backend.hadoop.PigJobControl;
 import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
 import org.apache.pig.backend.hadoop.executionengine.JobCreationException;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.partitioners.SecondaryKeyPartitioner;
@@ -89,7 +90,6 @@ import org.apache.pig.backend.hadoop.exe
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
 import org.apache.pig.backend.hadoop.executionengine.physicalLayer.util.PlanHelper;
-import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;
 import org.apache.pig.backend.hadoop.executionengine.util.MapRedUtil;
 import org.apache.pig.data.BagFactory;
 import org.apache.pig.data.DataType;
@@ -122,6 +122,7 @@ import org.apache.pig.impl.util.ObjectSe
 import org.apache.pig.impl.util.Pair;
 import org.apache.pig.impl.util.UDFContext;
 import org.apache.pig.impl.util.Utils;
+import org.apache.pig.tools.pigstats.mapreduce.MRJobStats;
 import org.apache.pig.tools.pigstats.mapreduce.MRPigStatsUtil;
 import org.apache.pig.tools.pigstats.mapreduce.MRScriptState;
 
@@ -311,7 +312,7 @@ public class JobControlCompiler{
                     " should be a time in ms. default=" + defaultPigJobControlSleep, e);
         }
 
-        JobControl jobCtrl = HadoopShims.newJobControl(grpName, timeToSleep);
+        JobControl jobCtrl = new PigJobControl(grpName, timeToSleep);
 
         try {
             List<MapReduceOper> roots = new LinkedList<MapReduceOper>();
@@ -384,7 +385,7 @@ public class JobControlCompiler{
         ArrayList<Pair<String,Long>> counterPairs;
 
         try {
-            counters = HadoopShims.getCounters(job);
+            counters = MRJobStats.getCounters(job);
 
             String groupName = getGroupName(counters.getGroupNames());
             // In case that the counter group was not find, we need to find
@@ -1672,14 +1673,6 @@ public class JobControlCompiler{
         if (distCachePath != null) {
             log.info("Jar file " + url + " already in DistributedCache as "
                     + distCachePath + ". Not copying to hdfs and adding again");
-            // Path already in dist cache
-            if (!HadoopShims.isHadoopYARN()) {
-                // Mapreduce in YARN includes $PWD/* which will add all *.jar files in classapth.
-                // So don't have to ensure that the jar is separately added to mapreduce.job.classpath.files
-                // But path may only be in 'mapred.cache.files' and not be in
-                // 'mapreduce.job.classpath.files' in Hadoop 1.x. So adding it there
-                DistributedCache.addFileToClassPath(distCachePath, conf, distCachePath.getFileSystem(conf));
-            }
         }
         else {
             // REGISTER always copies locally the jar file. see PigServer.registerJar()
@@ -1965,20 +1958,9 @@ public class JobControlCompiler{
 
     public static void setOutputFormat(org.apache.hadoop.mapreduce.Job job) {
         // the OutputFormat we report to Hadoop is always PigOutputFormat which
-        // can be wrapped with LazyOutputFormat provided if it is supported by
-        // the Hadoop version and PigConfiguration.PIG_OUTPUT_LAZY is set
+        // can be wrapped with LazyOutputFormat if PigConfiguration.PIG_OUTPUT_LAZY is set
         if ("true".equalsIgnoreCase(job.getConfiguration().get(PigConfiguration.PIG_OUTPUT_LAZY))) {
-            try {
-                Class<?> clazz = PigContext
-                        .resolveClassName("org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat");
-                Method method = clazz.getMethod("setOutputFormatClass",
-                        org.apache.hadoop.mapreduce.Job.class, Class.class);
-                method.invoke(null, job, PigOutputFormat.class);
-            } catch (Exception e) {
-                job.setOutputFormatClass(PigOutputFormat.class);
-                log.warn(PigConfiguration.PIG_OUTPUT_LAZY
-                        + " is set but LazyOutputFormat couldn't be loaded. Default PigOutputFormat will be used");
-            }
+            LazyOutputFormat.setOutputFormatClass(job, PigOutputFormat.class);
         } else {
             job.setOutputFormatClass(PigOutputFormat.class);
         }

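LazyOutputFormat is a stable part of the Hadoop 2 mapreduce API, which is why the reflective probe could be dropped. A minimal sketch of wrapping an output format lazily (job name hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

    public class LazyOutputSketch {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "lazy-output-sketch");
            // Output files are only created for partitions that actually
            // write at least one record.
            LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
        }
    }
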
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java Sat Jan  7 00:11:37 2017
@@ -1278,7 +1278,7 @@ public class MRCompiler extends PhyPlanV
                             List<InputSplit> splits = inf.getSplits(HadoopShims.cloneJobContext(job));
                             List<List<InputSplit>> results = MapRedUtil
                             .getCombinePigSplits(splits,
-                                    HadoopShims.getDefaultBlockSize(fs, path),
+                                    fs.getDefaultBlockSize(path),
                                     conf);
                             numFiles += results.size();
                         } else {

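The Path-aware getDefaultBlockSize is available on every Hadoop 2 FileSystem, so no shim indirection is needed here either. A minimal sketch against the local filesystem (path hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class BlockSizeSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.getLocal(new Configuration());
            // The Path-aware overload lets per-path policies (e.g. viewfs
            // mount points) determine the answer.
            long blockSize = fs.getDefaultBlockSize(new Path("/tmp/example"));
            System.out.println("Default block size: " + blockSize);
        }
    }
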
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java?rev=1777738&r1=1777737&r2=1777738&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java Sat Jan  7 00:11:37 2017
@@ -42,7 +42,8 @@ import org.apache.hadoop.mapred.JobClien
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.RunningJob;
-import org.apache.hadoop.mapred.TaskReport;
+import org.apache.hadoop.mapreduce.Cluster;
+import org.apache.hadoop.mapreduce.TaskReport;
 import org.apache.hadoop.mapred.jobcontrol.Job;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.pig.PigConfiguration;
@@ -81,9 +82,12 @@ import org.apache.pig.impl.util.Utils;
 import org.apache.pig.tools.pigstats.OutputStats;
 import org.apache.pig.tools.pigstats.PigStats;
 import org.apache.pig.tools.pigstats.PigStatsUtil;
+import org.apache.pig.tools.pigstats.mapreduce.MRJobStats;
 import org.apache.pig.tools.pigstats.mapreduce.MRPigStatsUtil;
 import org.apache.pig.tools.pigstats.mapreduce.MRScriptState;
 
+import org.python.google.common.collect.Lists;
+
 
 /**
  * Main class that launches pig for Map Reduce
@@ -109,7 +113,14 @@ public class MapReduceLauncher extends L
             if (jc != null && jc.getRunningJobs().size() > 0) {
                 log.info("Received kill signal");
                 for (Job job : jc.getRunningJobs()) {
-                    HadoopShims.killJob(job);
+                    org.apache.hadoop.mapreduce.Job mrJob = job.getJob();
+                    try {
+                        if (mrJob != null) {
+                            mrJob.killJob();
+                        }
+                    } catch (Exception ir) {
+                        throw new IOException(ir);
+                    }
                     log.info("Job " + job.getAssignedJobID() + " killed");
                     String timeStamp = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
                             .format(Calendar.getInstance().getTime());
@@ -332,11 +343,6 @@ public class MapReduceLauncher extends L
                                 log.info("detailed locations: " + aliasLocation);
                             }
 
-                            if (!HadoopShims.isHadoopYARN() && jobTrackerLoc != null) {
-                                log.info("More information at: http://" + jobTrackerLoc
-                                        + "/jobdetails.jsp?jobid=" + job.getAssignedJobID());
-                            }
-
                             // update statistics for this job so jobId is set
                             MRPigStatsUtil.addJobStats(job);
                             MRScriptState.get().emitJobStartedNotification(
@@ -486,10 +492,6 @@ public class MapReduceLauncher extends L
             for (Job job : succJobs) {
                 List<POStore> sts = jcc.getStores(job);
                 for (POStore st : sts) {
-                    if (Utils.isLocal(pc, job.getJobConf())) {
-                        HadoopShims.storeSchemaForLocal(job, st);
-                    }
-
                     if (!st.isTmpStore()) {
                         // create an "_SUCCESS" file in output location if
                         // output location is a filesystem dir
@@ -755,7 +757,7 @@ public class MapReduceLauncher extends L
     @SuppressWarnings("deprecation")
     void computeWarningAggregate(Job job, Map<Enum, Long> aggMap) {
         try {
-            Counters counters = HadoopShims.getCounters(job);
+            Counters counters = MRJobStats.getCounters(job);
             if (counters==null)
             {
                 long nullCounterCount =
@@ -809,13 +811,13 @@ public class MapReduceLauncher extends L
             throw new ExecException(backendException);
         }
         try {
-            Iterator<TaskReport> mapRep = HadoopShims.getTaskReports(job, TaskType.MAP);
+            Iterator<TaskReport> mapRep = MRJobStats.getTaskReports(job, TaskType.MAP);
             if (mapRep != null) {
                 getErrorMessages(mapRep, "map", errNotDbg, pigContext);
                 totalHadoopTimeSpent += computeTimeSpent(mapRep);
                 mapRep = null;
             }
-            Iterator<TaskReport> redRep = HadoopShims.getTaskReports(job, TaskType.REDUCE);
+            Iterator<TaskReport> redRep = MRJobStats.getTaskReports(job, TaskType.REDUCE);
             if (redRep != null) {
                 getErrorMessages(redRep, "reduce", errNotDbg, pigContext);
                 totalHadoopTimeSpent += computeTimeSpent(redRep);
@@ -833,5 +835,6 @@ public class MapReduceLauncher extends L
             throw new ExecException(e);
         }
     }
+
 }
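
Job kill is likewise now a direct call on the mapreduce Job handle. A minimal sketch of the kill pattern used in the shutdown hook above (class and method names hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.mapreduce.Job;

    public class KillSketch {
        // Kills the underlying MR job if one was submitted; wraps backend
        // failures in IOException the same way the launcher's hook does.
        static void killRunningJob(Job mrJob) throws IOException {
            try {
                if (mrJob != null) {
                    mrJob.killJob();
                }
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
    }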