You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by da...@apache.org on 2011/08/10 19:55:57 UTC
svn commit: r1156275 - in /pig/trunk: CHANGES.txt bin/pig build.xml
Author: daijy
Date: Wed Aug 10 17:55:56 2011
New Revision: 1156275
URL: http://svn.apache.org/viewvc?rev=1156275&view=rev
Log:
PIG-2183: Pig not working with Hadoop 0.20.203.0
Modified:
pig/trunk/CHANGES.txt
pig/trunk/bin/pig
pig/trunk/build.xml
Modified: pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/trunk/CHANGES.txt?rev=1156275&r1=1156274&r2=1156275&view=diff
==============================================================================
--- pig/trunk/CHANGES.txt (original)
+++ pig/trunk/CHANGES.txt Wed Aug 10 17:55:56 2011
@@ -103,6 +103,8 @@ PIG-2011: Speed up TestTypedMap.java (dv
BUG FIXES
+PIG-2183: Pig not working with Hadoop 0.20.203.0 (daijy)
+
PIG-2090: re-enable TestGrunt test cases (thejas)
PIG-2181: Improvement : for error message when describe misses alias (vivekp via daijy)
Modified: pig/trunk/bin/pig
URL: http://svn.apache.org/viewvc/pig/trunk/bin/pig?rev=1156275&r1=1156274&r2=1156275&view=diff
==============================================================================
--- pig/trunk/bin/pig (original)
+++ pig/trunk/bin/pig Wed Aug 10 17:55:56 2011
@@ -25,6 +25,8 @@
#
# PIG_CLASSPATH Extra Java CLASSPATH entries.
#
+# HADOOP_HOME Environment HADOOP_HOME
+#
# PIG_HEAPSIZE The maximum amount of heap to use, in MB.
# Default is 1000.
#
@@ -113,39 +115,36 @@ if [ "$PIG_CLASSPATH" != "" ]; then
CLASSPATH=${CLASSPATH}:${PIG_CLASSPATH}
fi
-# for developers, add Pig classes to CLASSPATH
-if [ -d "$PIG_HOME/build/classes" ]; then
- CLASSPATH=${CLASSPATH}:$PIG_HOME/build/classes
-fi
-if [ -d "$PIG_HOME/build/test/classes" ]; then
- CLASSPATH=${CLASSPATH}:$PIG_HOME/build/test/classes
-fi
-
# so that filenames w/ spaces are handled correctly in loops below
IFS=
-# for releases, add core pig to CLASSPATH
-for f in $PIG_HOME/share/pig/pig-*-core.jar; do
- CLASSPATH=${CLASSPATH}:$f;
+shopt -s nullglob
+# for release tarball, add core pig to CLASSPATH
+for f in $PIG_HOME/pig-*-core.jar; do
+ PIG_JAR=$f;
done
-# during development pig jar might be in build
-for f in $PIG_HOME/build/pig-*-SNAPSHOT.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-# locate Hadoop
-# For Hadoop 0.20.203+
-#
-if [ -d "${PIG_HOME}/share/hadoop" ]; then
- for f in ${PIG_HOME}/share/hadoop/hadoop*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
+# for development/source release, add pig-withouthadoop.jar
+if [ ${#PIG_JAR} -eq 0 ]; then
+ for f in $PIG_HOME/pig-*withouthadoop.jar; do
+ PIG_JAR=$f;
done
- for f in ${PIG_HOME}/share/hadoop/lib/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
+fi
+
+# for deb/rpm package, add pig jar in /usr/share/pig
+if [ ${#PIG_JAR} -eq 0 ]; then
+ for f in $PIG_HOME/share/pig/pig-*withouthadoop.jar; do
+ PIG_JAR=$f;
done
fi
+shopt -u nullglob
+if [ ${#PIG_JAR} -ne 0 ]; then
+ CLASSPATH=${CLASSPATH}:$PIG_JAR
+else
+ echo "Cannot locate pig jar. do 'ant jar-withouthadoop', and try again"
+ exit 1
+fi
# For Hadoop 0.23.0+
#
@@ -167,6 +166,55 @@ fi
# done
#fi
+# Set the version for Hadoop, default to 20
+PIG_HADOOP_VERSION="${PIG_HADOOP_VERSION:-20}"
+# add libs to CLASSPATH. There can be more than one version of the hadoop
+# libraries in the lib dir, so don't blindly add them all. Only add the one
+# that matches PIG_HADOOP_VERSION.
+if [[ ${#HADOOP_HOME} -ne 0 && -d ${HADOOP_HOME} ]]; then
+ HADOOP_LIB_DIR=$HADOOP_HOME
+ if [ -f $HADOOP_HOME/bin/hadoop ]; then
+ HADOOP_BIN=$HADOOP_HOME/bin/hadoop
+ fi
+elif [ -d "$PIG_HOME/build/ivy/lib/Pig" ]; then
+ HADOOP_LIB_DIR=$PIG_HOME/build/ivy/lib/Pig
+else
+ HADOOP_LIB_DIR=$PIG_HOME/lib
+fi
+
+if [ ${#HADOOP_BIN} -eq 0 ]; then
+ if [ -f /usr/bin/hadoop ]; then
+ HADOOP_BIN=/usr/bin/hadoop
+ fi
+fi
+
+HADOOP_CLASSPATH=`$HADOOP_BIN classpath 2>/dev/null`
+if [ ${#HADOOP_CLASSPATH} -eq 0 ]; then
+ for f in $HADOOP_LIB_DIR/*.jar; do
+ filename=`basename $f`
+ IS_HADOOP=`echo $filename | grep hadoop`
+ if [ ${#IS_HADOOP} -ne 0 ]; then
+ HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f;
+ else
+ IS_RIGHT_VER=`echo $f | grep hadoop-0\.${PIG_HADOOP_VERSION}\.*.jar | grep core`
+ if [ ${#IS_RIGHT_VER} -ne 0 ]; then
+ HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f;
+ fi
+ fi
+ done
+ if [ -d ${HADOOP_HOME} ]; then
+ HADOOP_CLASSPATH=${HADOOP_HOME}/conf:${HADOOP_CLASSPATH}
+ fi
+fi
+
+CLASSPATH=${CLASSPATH}:$HADOOP_CLASSPATH
+
+# if using HBase, likely want to include HBase config
+HBASE_CONF_DIR=${HBASE_CONF_DIR:-/etc/hbase}
+if [ -n "$HBASE_CONF_DIR" ] && [ -d "$HBASE_CONF_DIR" ]; then
+ CLASSPATH=$HBASE_CONF_DIR:$CLASSPATH
+fi
+
if [ -d "${PIG_HOME}/etc/hadoop" ]; then
CLASSPATH=${CLASSPATH}:${PIG_HOME}/etc/hadoop;
fi
@@ -185,30 +233,6 @@ if [ -d "${PIG_HOME}/share/hbase" ]; the
done
fi
-# Set the version for Hadoop, default to 20
-PIG_HADOOP_VERSION="${PIG_HADOOP_VERSION:-20}"
-# add libs to CLASSPATH. There can be more than one version of the hadoop
-# libraries in the lib dir, so don't blindly add them all. Only add the one
-# that matche PIG_HADOOP_VERSION.
-for f in $PIG_HOME/lib/*.jar; do
- filename=`basename $f`
- IS_HADOOP=`echo $filename | grep hadoop`
- if [ "${IS_HADOOP}x" == "x" ]; then
- CLASSPATH=${CLASSPATH}:$f;
- else
- IS_RIGHT_VER=`echo $f | grep hadoop${PIG_HADOOP_VERSION}.jar`
- if [ "${IS_RIGHT_VER}x" != "x" ]; then
- CLASSPATH=${CLASSPATH}:$f;
- fi
- fi
-done
-
-# if using HBase, likely want to include HBase config
-HBASE_CONF_DIR=${HBASE_CONF_DIR:-/etc/hbase}
-if [ -n "$HBASE_CONF_DIR" ] && [ -d "$HBASE_CONF_DIR" ]; then
- CLASSPATH=$HBASE_CONF_DIR:$CLASSPATH
-fi
-
# default log directory & file
if [ "$PIG_LOG_DIR" = "" ]; then
PIG_LOG_DIR="$PIG_HOME/logs"
Modified: pig/trunk/build.xml
URL: http://svn.apache.org/viewvc/pig/trunk/build.xml?rev=1156275&r1=1156274&r2=1156275&view=diff
==============================================================================
--- pig/trunk/build.xml (original)
+++ pig/trunk/build.xml Wed Aug 10 17:55:56 2011
@@ -448,7 +448,7 @@
</javadoc>
</target>
- <target name="javadoc-all" depends="jar, ivy-javadoc" description="Create documentation including all contrib projects">
+ <target name="javadoc-all" depends="jar-withouthadoop, ivy-javadoc" description="Create documentation including all contrib projects">
<mkdir dir="${build.javadoc}" />
<javadoc overview="${src.dir}/overview.html" packagenames="org.apache.pig*,org.apache.hadoop.zebra*" destdir="${build.javadoc}" author="true" version="true" use="true" windowtitle="${Name} ${version} API" doctitle="${Name} ${version} API" bottom="Copyright &copy; ${year} The Apache Software Foundation">
<packageset dir="${src.dir}" />
@@ -766,7 +766,7 @@
<!-- ================================================================== -->
<!-- D I S T R I B U T I O N -->
<!-- ================================================================== -->
- <target name="package" depends="docs, api-report" description="Create a Pig release">
+ <target name="package-release" depends="docs, api-report" description="Create a Pig release for rpm/deb distribution">
<mkdir dir="${dist.dir}" />
<mkdir dir="${dist.dir}/etc/pig" />
<mkdir dir="${dist.dir}/share/pig/scripts" />
@@ -776,12 +776,13 @@
<mkdir dir="${dist.dir}/share/doc/pig/jdiff"/>
<mkdir dir="${dist.dir}/share/doc/pig/license" />
- <copy todir="${dist.dir}/share/pig/lib" includeEmptyDirs="false">
- <!--fileset dir="${ivy.lib.dir}"/-->
- <fileset dir="${lib.dir}"/>
+ <copy file="${ivy.lib.dir}/hadoop-core-${hadoop-core.version}.jar" todir="${dist.dir}/lib"/>
+
+ <copy todir="${dist.dir}/lib/jdiff" includeEmptyDirs="false">
+ <fileset dir="${lib.dir}/jdiff"/>
</copy>
- <copy file="${output.jarfile.backcompat}" tofile="${dist.dir}/share/pig/${final.name}-core.jar" />
+ <copy file="${output.jarfile.backcompat.withouthadoop}" tofile="${dist.dir}/share/pig/${final.name}-withouthadoop.jar" />
<copy todir="${dist.dir}/bin">
<fileset dir="bin" />
@@ -827,25 +828,97 @@
</target>
+ <target name="package" depends="docs, api-report" description="Create a Pig release">
+ <mkdir dir="${dist.dir}" />
+ <mkdir dir="${dist.dir}/lib" />
+ <mkdir dir="${dist.dir}/conf" />
+ <mkdir dir="${dist.dir}/scripts" />
+ <mkdir dir="${dist.dir}/docs" />
+ <mkdir dir="${dist.dir}/docs/api" />
+ <mkdir dir="${dist.dir}/docs/jdiff"/>
+ <mkdir dir="${dist.dir}/license" />
+
+ <copy file="${ivy.lib.dir}/hadoop-core-${hadoop-core.version}.jar" todir="${dist.dir}/lib"/>
+
+ <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
+ <fileset dir="${lib.dir}"/>
+ </copy>
+
+ <copy file="${output.jarfile.backcompat.withouthadoop}" tofile="${dist.dir}/${final.name}-withouthadoop.jar" />
+
+ <copy todir="${dist.dir}/" file="ivy.xml" />
+
+ <copy todir="${dist.dir}/ivy">
+ <fileset dir="ivy" />
+ </copy>
+
+ <copy todir="${dist.dir}/bin">
+ <fileset dir="bin" />
+ </copy>
+
+ <copy todir="${dist.dir}/docs">
+ <fileset dir="${build.docs}" />
+ </copy>
+
+ <copy todir="${dist.dir}/conf" file="conf/pig.properties"/>
+
+ <copy todir="${dist.dir}/src" includeEmptyDirs="true">
+ <fileset dir="${src.dir}" />
+ </copy>
+
+ <copy todir="${dist.dir}/shims" includeEmptyDirs="true">
+ <fileset dir="${basedir}/shims" />
+ </copy>
+
+ <copy todir="${dist.dir}/lib-src" includeEmptyDirs="true">
+ <fileset dir="${src.lib.dir}" />
+ </copy>
+
+ <copy todir="${dist.dir}/test" includeEmptyDirs="true">
+ <fileset dir="${test.src.dir}" />
+ </copy>
+
+ <copy todir="${dist.dir}/tutorial" includeEmptyDirs="true">
+ <fileset dir="tutorial" />
+ </copy>
+
+ <copy todir="${dist.dir}/contrib" includeEmptyDirs="true">
+ <fileset dir="contrib" />
+ </copy>
+
+ <copy todir="${dist.dir}/" file="build.xml" />
+
+ <copy todir="${dist.dir}">
+ <fileset dir=".">
+ <include name="*.txt" />
+ </fileset>
+ </copy>
+
+ <copy todir="${dist.dir}/license">
+ <fileset dir="license" />
+ </copy>
+
+ <chmod perm="ugo+x" type="file">
+ <fileset dir="${dist.dir}/bin" />
+ </chmod>
+ </target>
+
<!-- ================================================================== -->
<!-- Make release packages -->
<!-- ================================================================== -->
- <target name="source" description="Source distribution">
- <mkdir dir="${build.dir}"/>
- <tar compression="gzip" longfile="gnu" destfile="${build.dir}/${name}-source-${version}.tar.gz">
- <tarfileset dir="${basedir}" mode="644">
- <exclude name="bin/*"/>
- <exclude name="build/**"/>
- <exclude name="pig.jar"/>
- <exclude name="src-gen/**"/>
- </tarfileset>
- <tarfileset dir="${basedir}" mode="755">
- <include name="bin/*"/>
- </tarfileset>
- </tar>
+ <target name="tar" depends="package" description="Source distribution">
+ <tar compression="gzip" longfile="gnu" destfile="${build.dir}/${final.name}.tar.gz">
+ <tarfileset dir="${build.dir}" mode="664">
+ <exclude name="${final.name}/bin/*" />
+ <include name="${final.name}/**" />
+ </tarfileset>
+ <tarfileset dir="${build.dir}" mode="755">
+ <include name="${final.name}/bin/*" />
+ </tarfileset>
+ </tar>
</target>
- <target name="tar" depends="package" description="Create release tarball">
+ <target name="tar-release" depends="package-release" description="Create release tarball">
<tar compression="gzip" longfile="gnu" destfile="${build.dir}/${final.name}.tar.gz">
<tarfileset dir="${build.dir}" mode="664">
<exclude name="${final.name}/bin/*" />
@@ -857,7 +930,7 @@
</tar>
</target>
- <target name="rpm" depends="jar-withouthadoop, tar" description="Make rpm package">
+ <target name="rpm" depends="tar-release" description="Make rpm package">
<mkdir dir="${package.buildroot}/BUILD" />
<mkdir dir="${package.buildroot}/RPMS" />
<mkdir dir="${package.buildroot}/SRPMS" />
@@ -894,7 +967,7 @@
<delete dir="${package.buildroot}" quiet="true" verbose="false"/>
</target>
- <target name="deb" depends="jar-withouthadoop, tar" description="Make deb package">
+ <target name="deb" depends="tar-release" description="Make deb package">
<taskdef name="deb"
classname="org.vafer.jdeb.ant.DebAntTask">
<classpath refid="classpath" />