Posted to common-commits@hadoop.apache.org by gk...@apache.org on 2009/05/30 14:29:22 UTC
svn commit: r780254 [1/2] - in /hadoop/core/branches/HADOOP-4687/hdfs: ./ ivy/ lib/
Author: gkesavan
Date: Sat May 30 12:29:22 2009
New Revision: 780254
URL: http://svn.apache.org/viewvc?rev=780254&view=rev
Log:
Add lib jars & ivy
Added:
hadoop/core/branches/HADOOP-4687/hdfs/build.xml
hadoop/core/branches/HADOOP-4687/hdfs/ivy/
hadoop/core/branches/HADOOP-4687/hdfs/ivy.xml
hadoop/core/branches/HADOOP-4687/hdfs/ivy/hadoop-core.pom
hadoop/core/branches/HADOOP-4687/hdfs/ivy/ivysettings.xml
hadoop/core/branches/HADOOP-4687/hdfs/ivy/libraries.properties
hadoop/core/branches/HADOOP-4687/hdfs/lib/
hadoop/core/branches/HADOOP-4687/hdfs/lib/hadoop-core-0.21.0-dev.jar (with props)
hadoop/core/branches/HADOOP-4687/hdfs/lib/hadoop-core-test-0.21.0-dev.jar (with props)
hadoop/core/branches/HADOOP-4687/hdfs/lib/hadoop-mapred-0.21.0-dev.jar (with props)
hadoop/core/branches/HADOOP-4687/hdfs/lib/hadoop-mapred-test-0.21.0-dev.jar (with props)
hadoop/core/branches/HADOOP-4687/hdfs/lib/hadoop-mapred-tools-0.21.0-dev.jar (with props)
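For reference, the new build.xml is largely self-contained: Ivy pulls the
common dependencies, and the pre-built hadoop-core/hadoop-mapred jars under
lib/ supply the rest. A rough usage sketch, using target names as defined in
the file below:

    ant compile   # default target: core + contrib + ant tasks
    ant jar       # writes build/hadoop-hdfs-0.21.0-dev.jar
    ant test      # hdfs, hdfs-with-mr and contrib unit tests
    ant tar       # full release tarball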
Added: hadoop/core/branches/HADOOP-4687/hdfs/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/branches/HADOOP-4687/hdfs/build.xml?rev=780254&view=auto
==============================================================================
--- hadoop/core/branches/HADOOP-4687/hdfs/build.xml (added)
+++ hadoop/core/branches/HADOOP-4687/hdfs/build.xml Sat May 30 12:29:22 2009
@@ -0,0 +1,1917 @@
+<?xml version="1.0"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<project name="Hadoop-Hdfs" default="compile"
+ xmlns:ivy="antlib:org.apache.ivy.ant">
+
+ <!-- Load all the default properties, and any the user wants -->
+ <!-- to contribute (without having to type -D or edit this file) -->
+ <property file="${user.home}/build.properties" />
+ <property file="${basedir}/build.properties" />
+
+ <property name="Name" value="Hadoop-Hdfs"/>
+ <property name="name" value="hadoop-hdfs"/>
+ <property name="version" value="0.21.0-dev"/>
+ <property name="final.name" value="${name}-${version}"/>
+ <property name="test.hdfs.final.name" value="${name}-test-${version}"/>
+ <property name="test.hdfswithmr.final.name" value="${name}-hdsfwithmr-test-${version}"/>
+ <property name="test.final.name" value="${name}-test-${version}"/>
+ <property name="year" value="2009"/>
+
+ <property name="src.dir" value="${basedir}/src"/>
+ <property name="hdfs.src.dir" value="${src.dir}/java"/>
+ <property name="anttasks.dir" value="${basedir}/src/ant"/>
+ <property name="lib.dir" value="${basedir}/lib"/>
+ <property name="conf.dir" value="${basedir}/conf"/>
+ <property name="contrib.dir" value="${basedir}/src/contrib"/>
+ <property name="docs.src" value="${basedir}/src/docs"/>
+ <property name="src.docs.cn" value="${basedir}/src/docs/cn"/>
+ <property name="changes.src" value="${docs.src}/changes"/>
+
+ <property name="xercescroot" value=""/>
+ <property name="build.dir" value="${basedir}/build"/>
+ <property name="build.classes" value="${build.dir}/classes"/>
+ <property name="build.src" value="${build.dir}/src"/>
+ <property name="build.webapps" value="${build.dir}/webapps"/>
+ <property name="build.anttasks" value="${build.dir}/ant"/>
+ <!-- convert spaces to _ so that mac os doesn't break things -->
+ <exec executable="sed" inputstring="${os.name}"
+ outputproperty="nonspace.os">
+ <arg value="s/ /_/g"/>
+ </exec>
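+ <!-- e.g. os.name "Mac OS X" becomes "Mac_OS_X", keeping ${build.platform} safe for use in paths -->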
+ <property name="build.platform"
+ value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
+ <property name="jvm.arch"
+ value="${sun.arch.data.model}"/>
+
+ <property name="build.docs" value="${build.dir}/docs"/>
+ <property name="build.docs.cn" value="${build.dir}/docs/cn"/>
+ <property name="build.javadoc" value="${build.docs}/api"/>
+ <property name="build.javadoc.timestamp" value="${build.javadoc}/index.html" />
+ <property name="build.javadoc.dev" value="${build.docs}/dev-api"/>
+ <property name="build.encoding" value="ISO-8859-1"/>
+
+ <property name="test.src.dir" value="${basedir}/src/test"/>
+ <property name="test.lib.dir" value="${basedir}/src/test/lib"/>
+ <property name="test.build.dir" value="${build.dir}/test"/>
+ <property name="test.generated.dir" value="${test.build.dir}/src"/>
+ <property name="test.build.data" value="${test.build.dir}/data"/>
+ <property name="test.cache.data" value="${test.build.dir}/cache"/>
+ <property name="test.debug.data" value="${test.build.dir}/debug"/>
+ <property name="test.log.dir" value="${test.build.dir}/logs"/>
+ <property name="test.build.classes" value="${test.build.dir}/classes"/>
+ <property name="test.build.extraconf" value="${test.build.dir}/extraconf"/>
+ <property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
+ <property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/>
+ <property name="test.include" value="Test*"/>
+ <property name="test.classpath.id" value="test.classpath"/>
+ <property name="test.output" value="no"/>
+ <property name="test.timeout" value="900000"/>
+ <property name="test.junit.output.format" value="plain"/>
+ <property name="test.junit.fork.mode" value="perTest" />
+ <property name="test.junit.printsummary" value="yes" />
+ <property name="test.junit.haltonfailure" value="no" />
+ <property name="test.junit.maxmemory" value="512m" />
+
+ <property name="test.hdfs.build.classes" value="${test.build.dir}/classes"/>
+ <property name="test.hdfs.with.mr.build.classes" value="${test.build.dir}/hdfs-with-mr/classes"/>
+ <property name="test.hdfs.with.mr.classpath.id" value="test.hdfs.with.mr.classpath"/>
+
+ <property name="web.src.dir" value="${basedir}/src/web"/>
+ <property name="src.webapps" value="${basedir}/src/webapps"/>
+
+ <property name="javadoc.link.java"
+ value="http://java.sun.com/javase/6/docs/api/"/>
+ <property name="javadoc.packages" value="org.apache.hadoop.*"/>
+ <property name="javadoc.maxmemory" value="512m" />
+
+ <property name="dist.dir" value="${build.dir}/${final.name}"/>
+
+ <property name="javac.debug" value="on"/>
+ <property name="javac.optimize" value="on"/>
+ <property name="javac.deprecation" value="off"/>
+ <property name="javac.version" value="1.6"/>
+ <property name="javac.args" value=""/>
+ <property name="javac.args.warnings" value="-Xlint:unchecked"/>
+
+ <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
+ <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>
+
+ <property name="rat.reporting.classname" value="rat.Report"/>
+
+ <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
+ <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
+ <property name="jdiff.stable" value="0.20.0"/>
+ <property name="jdiff.stable.javadoc"
+ value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
+
+ <property name="scratch.dir" value="${user.home}/tmp"/>
+ <property name="svn.cmd" value="svn"/>
+ <property name="grep.cmd" value="grep"/>
+ <property name="patch.cmd" value="patch"/>
+ <property name="make.cmd" value="make"/>
+
+ <!-- Ivy properties set here -->
+ <property name="ivy.dir" location="ivy" />
+ <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
+ <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
+ <property name="ivy_repo_url" value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
+ <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
+ <property name="ivy.org" value="org.apache.hadoop"/>
+ <property name="build.dir" location="build" />
+ <property name="dist.dir" value="${build.dir}/${final.name}"/>
+ <property name="build.ivy.dir" location="${build.dir}/ivy" />
+ <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
+ <property name="common.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
+ <property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
+ <property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven" />
+ <property name="build.ivy.maven.pom" location="${build.ivy.maven.dir}/hadoop-core-${version}.pom" />
+ <property name="build.ivy.maven.jar" location="${build.ivy.maven.dir}/hadoop-core-${version}.jar" />
+
+ <!--this is the naming policy for artifacts we want pulled down-->
+ <property name="ivy.artifact.retrieve.pattern" value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
+
+ <!--this is how artifacts that get built are named-->
+ <property name="ivy.publish.pattern" value="hadoop-[revision]-core.[ext]"/>
+ <property name="hadoop-hdfs.jar" location="${build.dir}/${final.name}.jar" />
+
+ <!-- jdiff.home property set -->
+ <property name="jdiff.home" value="${build.ivy.lib.dir}/${ant.project.name}/jdiff"/>
+ <property name="jdiff.jar" value="${jdiff.home}/jdiff-${jdiff.version}.jar"/>
+ <property name="xerces.jar" value="${jdiff.home}/xerces-${xerces.version}.jar"/>
+
+ <property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
+ <available property="clover.present" file="${clover.jar}" />
+
+ <!-- check if clover reports should be generated -->
+ <condition property="clover.enabled">
+ <and>
+ <isset property="run.clover"/>
+ <isset property="clover.present"/>
+ </and>
+ </condition>
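+ <!-- i.e. Clover instrumentation is enabled only when -Drun.clover is passed and clover.jar is present at ${clover.home}/lib/clover.jar -->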
+
+ <!-- the normal classpath -->
+ <path id="classpath">
+ <pathelement location="${build.classes}"/>
+ <fileset dir="${lib.dir}">
+ <include name="hadoop-core-${version}.jar" />
+ <exclude name="**/excluded/" />
+ </fileset>
+ <pathelement location="${conf.dir}"/>
+ <path refid="ivy-common.classpath"/>
+ </path>
+
+ <path id="test.classpath">
+ <pathelement location="${test.build.extraconf}"/>
+ <pathelement location="${test.hdfs.build.classes}" />
+ <pathelement location="${test.src.dir}"/>
+ <pathelement location="${build.dir}"/>
+ <pathelement location="${build.examples}"/>
+ <pathelement location="${build.tools}"/>
+ <pathelement path="${clover.jar}"/>
+ <path refid="ivy.test.classpath"/>
+ <fileset dir="${lib.dir}">
+ <include name="hadoop-core-test-${version}.jar" />
+ <exclude name="**/excluded/" />
+ </fileset>
+ <path refid="classpath"/>
+ </path>
+
+ <path id="test.hdfs.with.mr.classpath">
+ <path refid="test.classpath"/>
+ <pathelement location="${test.hdfs.with.mr.build.classes}" />
+ <pathelement location="${lib.dir}/hadoop-mapred-test-${version}.jar" />
+ <pathelement location="${lib.dir}/hadoop-mapred-${version}.jar" />
+ <pathelement location="${lib.dir}/hadoop-mapred-tools-${version}.jar" />
+ </path>
+
+ <!-- the cluster test classpath: uses conf.dir for configuration -->
+ <path id="test.cluster.classpath">
+ <path refid="classpath"/>
+ <pathelement location="${test.build.classes}" />
+ <pathelement location="${test.src.dir}"/>
+ <pathelement location="${build.dir}"/>
+ </path>
+
+ <!-- properties dependent on the items defined above. -->
+ <!--<available classname="${rat.reporting.classname}" classpathref="classpath" property="rat.present" value="true"/> -->
+
+ <!-- ====================================================== -->
+ <!-- Macro definitions -->
+ <!-- ====================================================== -->
+ <macrodef name="macro_tar" description="Worker Macro for tar">
+ <attribute name="param.destfile"/>
+ <element name="param.listofitems"/>
+ <sequential>
+ <tar compression="gzip" longfile="gnu"
+ destfile="@{param.destfile}">
+ <param.listofitems/>
+ </tar>
+ </sequential>
+ </macrodef>
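+ <!-- usage sketch (see the "tar" and "binary" targets below):
+ <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
+ <param.listofitems>
+ <tarfileset dir="${build.dir}" mode="755"> ... </tarfileset>
+ </param.listofitems>
+ </macro_tar>
+ -->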
+
+ <!-- ====================================================== -->
+ <!-- Stuff needed by all targets -->
+ <!-- ====================================================== -->
+ <target name="init" depends="ivy-retrieve-common">
+ <mkdir dir="${build.dir}"/>
+ <mkdir dir="${build.classes}"/>
+ <mkdir dir="${build.src}"/>
+ <mkdir dir="${build.webapps}/hdfs/WEB-INF"/>
+ <mkdir dir="${build.webapps}/datanode/WEB-INF"/>
+ <mkdir dir="${build.webapps}/secondary/WEB-INF"/>
+ <mkdir dir="${build.anttasks}"/>
+
+ <mkdir dir="${test.build.dir}"/>
+ <mkdir dir="${test.build.classes}"/>
+ <mkdir dir="${test.build.extraconf}"/>
+ <tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
+ <touch millis="0" file="${touch.temp.file}">
+ <fileset dir="${conf.dir}" includes="**/*.template"/>
+ <fileset dir="${contrib.dir}" includes="**/*.template"/>
+ </touch>
+ <delete file="${touch.temp.file}"/>
+ <!-- copy all of the jsp and static files -->
+ <copy todir="${build.webapps}">
+ <fileset dir="${src.webapps}">
+ <exclude name="**/*.jsp" />
+ </fileset>
+ </copy>
+ </target>
+
+ <!-- ====================================================== -->
+ <!-- Compile the Java files -->
+ <!-- ====================================================== -->
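+ <!-- note: ${core.src.dir} is never defined in this build file; record-parser (guarded by javacc.home) and compile-core-classes appear to be leftovers from the pre-split core build, and compile-core-classes is not reachable from the default compile target -->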
+ <target name="record-parser" depends="init" if="javacc.home">
+ <javacc
+ target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
+ outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
+ javacchome="${javacc.home}" />
+ </target>
+
+ <target name="compile-rcc-compiler" depends="init, record-parser">
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${hdfs.src.dir}"
+ includes="org/apache/hadoop/record/compiler/**/*.java"
+ destdir="${build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args}"/>
+ <classpath refid="classpath"/>
+ </javac>
+
+ <taskdef name="recordcc" classname="org.apache.hadoop.record.compiler.ant.RccTask">
+ <classpath refid="classpath" />
+ </taskdef>
+ </target>
+
+ <target name="compile-core-classes" depends="init, compile-rcc-compiler">
+ <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
+ <classpath refid="test.classpath"/>
+ </taskdef>
+ <!-- Compile Java files (excluding JSPs) checking warnings -->
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${core.src.dir}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="classpath"/>
+ </javac>
+
+ <copy todir="${build.classes}">
+ <fileset dir="${core.src.dir}" includes="**/*.properties"/>
+ <fileset dir="${core.src.dir}" includes="core-default.xml"/>
+ </copy>
+
+ </target>
+<!--
+ <target name="compile-mapred-classes" depends="compile-core-classes">
+ <jsp-compile
+ uriroot="${src.webapps}/task"
+ outputdir="${build.src}"
+ package="org.apache.hadoop.mapred"
+ webxml="${build.webapps}/task/WEB-INF/web.xml">
+ </jsp-compile>
+
+ <jsp-compile
+ uriroot="${src.webapps}/job"
+ outputdir="${build.src}"
+ package="org.apache.hadoop.mapred"
+ webxml="${build.webapps}/job/WEB-INF/web.xml">
+ </jsp-compile>
+
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${mapred.src.dir};${build.src}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="classpath"/>
+ </javac>
+
+ <copy todir="${build.classes}">
+ <fileset dir="${mapred.src.dir}" includes="**/*.properties"/>
+ <fileset dir="${mapred.src.dir}" includes="mapred-default.xml"/>
+ </copy>
+ </target>
+-->
+ <!--<target name="compile-hdfs-classes" depends="compile-core-classes">-->
+ <target name="compile-hdfs-classes" depends="init, compile-rcc-compiler">
+ <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
+ <classpath refid="classpath"/>
+ </taskdef>
+ <jsp-compile
+ uriroot="${src.webapps}/hdfs"
+ outputdir="${build.src}"
+ package="org.apache.hadoop.hdfs.server.namenode"
+ webxml="${build.webapps}/hdfs/WEB-INF/web.xml">
+ </jsp-compile>
+
+ <jsp-compile
+ uriroot="${src.webapps}/datanode"
+ outputdir="${build.src}"
+ package="org.apache.hadoop.hdfs.server.datanode"
+ webxml="${build.webapps}/datanode/WEB-INF/web.xml">
+ </jsp-compile>
+
+ <jsp-compile
+ uriroot="${src.webapps}/secondary"
+ outputdir="${build.src}"
+ package="org.apache.hadoop.hdfs.server.namenode"
+ webxml="${build.webapps}/secondary/WEB-INF/web.xml">
+ </jsp-compile>
+
+ <!-- Compile Java files (excluding JSPs) checking warnings -->
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${hdfs.src.dir};${build.src}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="classpath"/>
+ </javac>
+
+ <copy todir="${build.classes}">
+ <fileset dir="${hdfs.src.dir}" includes="**/*.properties"/>
+ <fileset dir="${hdfs.src.dir}" includes="hdfs-default.xml"/>
+ </copy>
+ </target>
+<!--
+ <target name="compile-tools" depends="init">
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${tools.src}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${build.tools}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="classpath"/>
+ </javac>
+
+ <copy todir="${build.tools}">
+ <fileset
+ dir="${tools.src}"
+ includes="**/*.properties"
+ />
+ </copy>
+ </target>
+
+ <target name="compile-native">
+ <antcall target="compile-core-native">
+ <param name="compile.native" value="true"/>
+ </antcall>
+ </target>
+
+ <target name="compile-core-native" depends="compile-core-classes"
+ if="compile.native">
+
+ <mkdir dir="${build.native}/lib"/>
+ <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
+
+ <javah
+ classpath="${build.classes}"
+ destdir="${build.native}/src/org/apache/hadoop/io/compress/zlib"
+ force="yes"
+ verbose="yes"
+ >
+ <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" />
+ <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
+ </javah>
+
+ <exec dir="${build.native}" executable="sh" failonerror="true">
+ <env key="OS_NAME" value="${os.name}"/>
+ <env key="OS_ARCH" value="${os.arch}"/>
+ <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
+ <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
+ <arg line="${native.src.dir}/configure"/>
+ </exec>
+
+ <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
+ <env key="OS_NAME" value="${os.name}"/>
+ <env key="OS_ARCH" value="${os.arch}"/>
+ <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
+ <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
+ </exec>
+
+ <exec dir="${build.native}" executable="sh" failonerror="true">
+ <arg line="${build.native}/libtool mode=install cp ${build.native}/lib/libhadoop.la ${build.native}/lib"/>
+ </exec>
+
+ </target>
+-->
+ <target name="compile-core"
+ depends="clover,
+ compile-hdfs-classes"
+ description="Compile core only">
+ </target>
+
+ <target name="compile-contrib" depends="compile-core">
+ <subant target="compile">
+ <property name="version" value="${version}"/>
+ <fileset file="${contrib.dir}/build.xml"/>
+ </subant>
+ </target>
+
+ <target name="compile" depends="compile-core, compile-contrib, compile-ant-tasks" description="Compile core, contrib">
+ </target>
+
+ <target name="compile-examples"
+ depends="compile-core">
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${examples.dir}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${build.examples}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath>
+ <path refid="classpath"/>
+ <pathelement location="${build.tools}"/>
+ </classpath>
+ </javac>
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- Make hadoop.jar -->
+ <!-- ================================================================== -->
+ <!-- -->
+ <!-- ================================================================== -->
+ <target name="jar" depends="compile-core" description="Make hadoop.jar">
+ <!-- <tar compression="gzip" destfile="${build.classes}/bin.tgz">
+ <tarfileset dir="bin" mode="755"/>
+ </tar> -->
+ <jar jarfile="${hadoop-hdfs.jar}"
+ basedir="${build.classes}">
+ <manifest>
+ <section name="org/apache/hadoop">
+ <attribute name="Implementation-Title" value="${ant.project.name}"/>
+ <attribute name="Implementation-Version" value="${version}"/>
+ <attribute name="Implementation-Vendor" value="Apache"/>
+ </section>
+ </manifest>
+ <fileset file="${conf.dir}/commons-logging.properties"/>
+ <fileset file="${conf.dir}/log4j.properties"/>
+ <fileset file="${conf.dir}/hadoop-metrics.properties"/>
+ <zipfileset dir="${build.webapps}" prefix="webapps"/>
+ </jar>
+ </target>
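+ <!-- "ant jar" writes ${hadoop-hdfs.jar}, i.e. build/hadoop-hdfs-0.21.0-dev.jar with the default version property -->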
+
+ <!-- ================================================================== -->
+ <!-- Make the Hadoop examples jar. -->
+ <!-- ================================================================== -->
+ <!-- -->
+ <!-- ================================================================== -->
+<!--
+ <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar.">
+ <jar jarfile="${build.dir}/${final.name}-examples.jar"
+ basedir="${build.examples}">
+ <manifest>
+ <attribute name="Main-Class"
+ value="org/apache/hadoop/examples/ExampleDriver"/>
+ </manifest>
+ </jar>
+ </target>
+
+ <target name="tools-jar" depends="jar, compile-tools"
+ description="Make the Hadoop tools jar.">
+ <jar jarfile="${build.dir}/${final.name}-tools.jar"
+ basedir="${build.tools}">
+ <manifest>
+ <attribute name="Main-Class"
+ value="org/apache/hadoop/examples/ExampleDriver"/>
+ </manifest>
+ </jar>
+ </target>
+
+ <target name="generate-test-records" depends="compile-rcc-compiler">
+ <recordcc destdir="${test.generated.dir}">
+ <fileset dir="${test.src.dir}"
+ includes="**/*.jr" />
+ </recordcc>
+ </target>
+-->
+ <!-- ================================================================== -->
+ <!-- Compile test code -->
+ <!-- ================================================================== -->
+
+<!--
+ <target name="compile-core-test" depends="compile-core-classes, generate-test-records">
+ <mkdir dir="${test.core.build.classes}"/>
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.generated.dir}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${test.core.build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args}" />
+ <classpath refid="test.classpath"/>
+ </javac>
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/core"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${test.core.build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="test.classpath"/>
+ </javac>
+
+ <delete dir="${test.cache.data}"/>
+ <mkdir dir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/core/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/>
+ </target>
+-->
+
+ <target name="compile-hdfs-test" depends="compile-hdfs-classes, ivy-retrieve-test">
+ <mkdir dir="${test.hdfs.build.classes}"/>
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/hdfs"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${test.hdfs.build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="test.classpath"/>
+ </javac>
+
+ <delete dir="${test.cache.data}"/>
+ <mkdir dir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/testHDFSConf.xml" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data15bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data30bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data120bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV18" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19" todir="${test.cache.data}"/>
+ </target>
+<!--
+ <target name="compile-mapred-test" depends="compile-examples, compile-hdfs-test">
+
+ <mkdir dir="${test.mapred.build.classes}"/>
+ <mkdir dir="${test.mapred.build.testjar}"/>
+ <mkdir dir="${test.mapred.build.testshell}"/>
+
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/mapred"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${test.mapred.build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="test.mapred.classpath"/>
+ </javac>
+
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/mapred/testjar"
+ includes="*.java"
+ destdir="${test.mapred.build.testjar}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="test.mapred.classpath"/>
+ </javac>
+
+ <delete file="${test.mapred.build.testjar}/testjob.jar"/>
+ <jar jarfile="${test.mapred.build.testjar}/testjob.jar"
+ basedir="${test.mapred.build.testjar}">
+ </jar>
+
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/mapred/testshell"
+ includes="*.java"
+ destdir="${test.mapred.build.testshell}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}"/>
+ <classpath refid="test.mapred.classpath"/>
+ </javac>
+ <delete file="${test.mapred.build.testshell}/testshell.jar"/>
+ <jar jarfile="${test.mapred.build.testshell}/testshell.jar"
+ basedir="${test.mapred.build.testshell}">
+ </jar>
+
+ <delete dir="${test.cache.data}"/>
+ <mkdir dir="${test.cache.data}"/>
+ <delete dir="${test.debug.data}"/>
+ <mkdir dir="${test.debug.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/cli/testMRConf.xml" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
+ </target>
+-->
+ <target name="compile-hdfs-with-mr-test" depends="compile-hdfs-test">
+ <mkdir dir="${test.hdfs.with.mr.build.classes}"/>
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/hdfs-with-mr"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${test.hdfs.with.mr.build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="test.hdfs.with.mr.classpath"/>
+ </javac>
+ </target>
+
+
+ <!-- ================================================================== -->
+ <!-- Make hadoop-test.jar -->
+ <!-- ================================================================== -->
+ <!-- -->
+ <!-- ================================================================== -->
+ <target name="jar-test" depends="jar-hdfs-test, jar-hdfswithmr-test" description="Make hadoop-test.jar"/>
+
+ <target name="jar-hdfs-test" depends="compile-hdfs-test" description="Make hadoop-hdfs-test.jar">
+ <copy todir="${test.build.classes}">
+ <fileset dir="${test.hdfs.build.classes}"/>
+ </copy>
+ <jar jarfile="${build.dir}/${test.hdfs.final.name}.jar"
+ basedir="${test.build.classes}">
+ <manifest>
+ <attribute name="Main-Class"
+ value="org/apache/hadoop/test/HdfsTestDriver"/>
+ <section name="org/apache/hadoop">
+ <attribute name="Implementation-Title" value="${ant.project.name}"/>
+ <attribute name="Implementation-Version" value="${version}"/>
+ <attribute name="Implementation-Vendor" value="Apache"/>
+ </section>
+ </manifest>
+ </jar>
+ </target>
+
+ <target name="jar-hdfswithmr-test" depends="compile-hdfs-with-mr-test" description="Make hadoop-hdfswithmr-test.jar">
+ <copy todir="${test.build.classes}">
+ <fileset dir="${test.hdfs.with.mr.build.classes}"/>
+ </copy>
+ <jar jarfile="${build.dir}/${test.hdfswithmr.final.name}.jar"
+ basedir="${test.build.classes}">
+ <manifest>
+ <attribute name="Main-Class"
+ value="org/apache/hadoop/test/HdfsWithMRTestDriver"/>
+ <section name="org/apache/hadoop">
+ <attribute name="Implementation-Title" value="${ant.project.name}"/>
+ <attribute name="Implementation-Version" value="${version}"/>
+ <attribute name="Implementation-Vendor" value="Apache"/>
+ </section>
+ </manifest>
+ </jar>
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- Run unit tests -->
+ <!-- ================================================================== -->
+ <target name="run-test-hdfs" depends="compile-hdfs-test" description="Run hdfs unit tests">
+ <delete dir="${test.build.data}"/>
+ <mkdir dir="${test.build.data}"/>
+ <delete dir="${test.log.dir}"/>
+ <mkdir dir="${test.log.dir}"/>
+ <copy file="${test.src.dir}/hadoop-policy.xml"
+ todir="${test.build.extraconf}" />
+ <junit showoutput="${test.output}"
+ printsummary="${test.junit.printsummary}"
+ haltonfailure="${test.junit.haltonfailure}"
+ fork="yes"
+ forkmode="${test.junit.fork.mode}"
+ maxmemory="${test.junit.maxmemory}"
+ dir="${basedir}" timeout="${test.timeout}"
+ errorProperty="tests.failed" failureProperty="tests.failed">
+ <sysproperty key="test.build.data" value="${test.build.data}"/>
+ <sysproperty key="test.cache.data" value="${test.cache.data}"/>
+ <sysproperty key="test.debug.data" value="${test.debug.data}"/>
+ <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
+ <sysproperty key="test.src.dir" value="${test.src.dir}"/>
+ <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
+ <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
+ <sysproperty key="java.library.path"
+ value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+ <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
+ <!-- set compile.c++ in the child jvm only if it is set -->
+ <syspropertyset dynamic="no">
+ <propertyref name="compile.c++"/>
+ </syspropertyset>
+ <classpath refid="test.classpath"/>
+ <formatter type="${test.junit.output.format}" />
+ <batchtest todir="${test.build.dir}" unless="testcase">
+ <fileset dir="${test.src.dir}/hdfs"
+ includes="**/${test.include}.java"
+ excludes="**/${test.exclude}.java" />
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="testcase">
+ <fileset dir="${test.src.dir}/hdfs" includes="**/${testcase}.java"/>
+ </batchtest>
+ </junit>
+ <antcall target="checkfailure"/>
+ </target>
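+ <!-- a single test class can be selected with: ant run-test-hdfs -Dtestcase=<ClassName> -->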
+
+ <target name="run-test-hdfs-with-mr" depends="compile-hdfs-with-mr-test" description="Run hdfs unit tests that require mapred">
+
+ <delete dir="${test.build.data}"/>
+ <mkdir dir="${test.build.data}"/>
+ <delete dir="${test.log.dir}"/>
+ <mkdir dir="${test.log.dir}"/>
+ <copy file="${test.src.dir}/hadoop-policy.xml"
+ todir="${test.build.extraconf}" />
+ <junit showoutput="${test.output}"
+ printsummary="${test.junit.printsummary}"
+ haltonfailure="${test.junit.haltonfailure}"
+ fork="yes"
+ forkmode="${test.junit.fork.mode}"
+ maxmemory="${test.junit.maxmemory}"
+ dir="${basedir}" timeout="${test.timeout}"
+ errorProperty="tests.failed" failureProperty="tests.failed">
+ <sysproperty key="test.build.data" value="${test.build.data}"/>
+ <sysproperty key="test.cache.data" value="${test.cache.data}"/>
+ <sysproperty key="test.debug.data" value="${test.debug.data}"/>
+ <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
+ <sysproperty key="test.src.dir" value="${test.src.dir}"/>
+ <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
+ <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
+ <sysproperty key="java.library.path"
+ value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+ <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
+ <!-- set compile.c++ in the child jvm only if it is set -->
+ <syspropertyset dynamic="no">
+ <propertyref name="compile.c++"/>
+ </syspropertyset>
+ <classpath refid="test.hdfs.with.mr.classpath"/>
+ <formatter type="${test.junit.output.format}" />
+ <batchtest todir="${test.build.dir}" unless="testcase">
+ <fileset dir="${test.src.dir}/hdfs-with-mr"
+ includes="**/${test.include}.java"
+ excludes="**/${test.exclude}.java" />
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="testcase">
+ <fileset dir="${test.src.dir}/hdfs-with-mr" includes="**/${testcase}.java"/>
+ </batchtest>
+ </junit>
+ <antcall target="checkfailure"/>
+ </target>
+
+ <target name="checkfailure" if="tests.failed">
+ <touch file="${test.build.dir}/testsfailed"/>
+ <fail unless="continueOnFailure">Tests failed!</fail>
+ </target>
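+ <!-- test-core sets continueOnFailure and checks the testsfailed marker afterwards, so both suites run to completion before the build fails -->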
+
+ <target name="test-contrib" depends="compile, compile-hdfs-with-mr-test" description="Run contrib unit tests">
+ <subant target="test">
+ <property name="version" value="${version}"/>
+ <property name="clover.jar" value="${clover.jar}"/>
+ <fileset file="${contrib.dir}/build.xml"/>
+ </subant>
+ </target>
+
+ <target name="test-core" description="Run core, hdfs and mapred unit tests">
+ <delete file="${test.build.dir}/testsfailed"/>
+ <property name="continueOnFailure" value="true"/>
+ <antcall target="run-test-hdfs"/>
+ <antcall target="run-test-hdfs-with-mr"/>
+ <available file="${test.build.dir}/testsfailed" property="testsfailed"/>
+ <fail if="testsfailed">Tests failed!</fail>
+ </target>
+
+ <target name="test" depends="jar-test, test-core" description="Run all unit tests">
+ <subant target="test-contrib">
+ <fileset file="${basedir}/build.xml"/>
+ </subant>
+ </target>
+
+ <!-- Run all unit tests, not just Test*, and use non-test configuration. -->
+ <target name="test-cluster" description="Run all unit tests, not just Test*, and use non-test configuration.">
+ <antcall target="test">
+ <param name="test.include" value="*"/>
+ <param name="test.classpath.id" value="test.cluster.classpath"/>
+ </antcall>
+ </target>
+
+ <target name="nightly" depends="test, tar">
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- Run optional third-party tool targets -->
+ <!-- ================================================================== -->
+ <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" description="Run optional third-party tool targets">
+ <taskdef resource="checkstyletask.properties">
+ <classpath refid="checkstyle-classpath"/>
+ </taskdef>
+
+ <mkdir dir="${test.build.dir}"/>
+
+ <checkstyle config="${test.src.dir}/checkstyle.xml"
+ failOnViolation="false">
+ <fileset dir="${hdfs.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
+ <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
+ </checkstyle>
+
+ <xslt style="${test.src.dir}/checkstyle-noframes-sorted.xsl"
+ in="${test.build.dir}/checkstyle-errors.xml"
+ out="${test.build.dir}/checkstyle-errors.html"/>
+ </target>
+
+ <target name="check-for-checkstyle">
+ <available property="checkstyle.present" resource="checkstyletask.properties">
+ <classpath refid="checkstyle-classpath"/>
+ </available>
+ </target>
+
+ <property name="findbugs.home" value=""/>
+ <target name="findbugs" depends="check-for-findbugs, jar" if="findbugs.present" description="Run findbugs if present">
+ <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
+ <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
+ <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/>
+ <property name="findbugs.report.xmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.xml"/>
+ <taskdef name="findbugs" classname="edu.umd.cs.findbugs.anttask.FindBugsTask"
+ classpath="${findbugs.home}/lib/findbugs-ant.jar" />
+
+ <mkdir dir="${findbugs.out.dir}"/>
+
+ <findbugs home="${findbugs.home}" output="xml:withMessages"
+ outputFile="${findbugs.report.xmlfile}" effort="max"
+ excludeFilter="${findbugs.exclude.file}" jvmargs="-Xmx512M">
+ <auxClasspath>
+ <fileset dir="${lib.dir}">
+ <include name="**/*.jar"/>
+ </fileset>
+ <fileset dir="${build.ivy.lib.dir}/${ant.project.name}/common">
+ <include name="**/*.jar"/>
+ </fileset>
+ </auxClasspath>
+ <sourcePath path="${hdfs.src.dir}"/>
+ <class location="${basedir}/build/${final.name}.jar" />
+ </findbugs>
+
+ <xslt style="${findbugs.home}/src/xsl/default.xsl"
+ in="${findbugs.report.xmlfile}"
+ out="${findbugs.report.htmlfile}"/>
+ </target>
+
+ <target name="check-for-findbugs">
+ <available property="findbugs.present"
+ file="${findbugs.home}/lib/findbugs.jar" />
+ </target>
+
+
+ <!-- ================================================================== -->
+ <!-- Documentation -->
+ <!-- ================================================================== -->
+
+ <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line." if="forrest.home">
+ <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest"
+ failonerror="true">
+ <env key="JAVA_HOME" value="${java5.home}"/>
+ </exec>
+ <copy todir="${build.docs}">
+ <fileset dir="${docs.src}/build/site/" />
+ </copy>
+ <copy file="${docs.src}/releasenotes.html" todir="${build.docs}"/>
+ <style basedir="${hdfs.src.dir}" destdir="${build.docs}"
+ includes="hdfs-default.xml" style="conf/configuration.xsl"/>
+ <antcall target="changes-to-html"/>
+ <antcall target="cn-docs"/>
+ </target>
+
+ <target name="cn-docs" depends="forrest.check, init"
+ description="Generate forrest-based Chinese documentation. To use, specify -Dforrest.home=<base of Apache Forrest installation> on the command line."
+ if="forrest.home">
+ <exec dir="${src.docs.cn}" executable="${forrest.home}/bin/forrest" failonerror="true">
+ <env key="LANG" value="en_US.utf8"/>
+ <env key="JAVA_HOME" value="${java5.home}"/>
+ </exec>
+ <copy todir="${build.docs.cn}">
+ <fileset dir="${src.docs.cn}/build/site/" />
+ </copy>
+ <style basedir="${hdfs.src.dir}" destdir="${build.docs.cn}"
+ includes="hdfs-default.xml" style="conf/configuration.xsl"/>
+ <antcall target="changes-to-html"/>
+ </target>
+
+ <target name="forrest.check" unless="forrest.home" depends="java5.check">
+ <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=<base of Apache Forrest installation> to Ant on the command-line." />
+ </target>
+
+ <target name="java5.check" unless="java5.home">
+ <fail message="'java5.home' is not defined. Forrest requires Java 5. Please pass -Djava5.home=<base of Java 5 distribution> to Ant on the command-line." />
+ </target>
+
+ <target name="javadoc-dev" depends="compile, ivy-retrieve-javadoc" description="Generate javadoc for hadoop developers">
+ <mkdir dir="${build.javadoc.dev}"/>
+ <javadoc
+ overview="${hdfs.src.dir}/overview.html"
+ packagenames="org.apache.hadoop.*"
+ destdir="${build.javadoc.dev}"
+ author="true"
+ version="true"
+ use="true"
+ windowtitle="${Name} ${version} API"
+ doctitle="${Name} ${version} Developer API"
+ bottom="Copyright &copy; ${year} The Apache Software Foundation"
+ maxmemory="${javadoc.maxmemory}">
+ <packageset dir="${hdfs.src.dir}"/>
+ <link href="${javadoc.link.java}"/>
+ <classpath >
+ <path refid="classpath" />
+ <path refid="javadoc-classpath"/>
+ <pathelement path="${java.class.path}"/>
+ </classpath>
+ <group title="${ant.project.name}" packages="org.apache.*"/>
+ </javadoc>
+ </target>
+
+ <target name="javadoc-uptodate" depends="compile, ivy-retrieve-javadoc">
+ <uptodate property="javadoc.is.uptodate">
+ <srcfiles dir="${src.dir}">
+ <include name="**/*.java" />
+ <include name="**/*.html" />
+ </srcfiles>
+ <mapper type="merge" to="${build.javadoc.timestamp}" />
+ </uptodate>
+ </target>
+
+ <target name="javadoc" description="Generate javadoc" depends="javadoc-uptodate"
+ unless="javadoc.is.uptodate">
+ <mkdir dir="${build.javadoc}"/>
+ <javadoc
+ overview="${hdfs.src.dir}/overview.html"
+ packagenames="org.apache.hadoop.*"
+ destdir="${build.javadoc}"
+ author="true"
+ version="true"
+ use="true"
+ windowtitle="${Name} ${version} API"
+ doctitle="${Name} ${version} API"
+ bottom="Copyright &copy; ${year} The Apache Software Foundation"
+ maxmemory="${javadoc.maxmemory}">
+
+ <packageset dir="${hdfs.src.dir}"/>
+ <link href="${javadoc.link.java}"/>
+ <classpath >
+ <path refid="classpath" />
+ <path refid="javadoc-classpath"/>
+ <pathelement path="${java.class.path}"/>
+ <pathelement location="${build.tools}"/>
+ </classpath>
+
+ <group title="${ant.project.name}" packages="org.apache.*"/>
+ </javadoc>
+ </target>
+
+ <target name="api-xml" depends="ivy-retrieve-jdiff,javadoc,write-null">
+ <javadoc maxmemory="${javadoc.maxmemory}">
+ <doclet name="jdiff.JDiff"
+ path="${jdiff.jar}:${xerces.jar}">
+ <param name="-apidir" value="${jdiff.xml.dir}"/>
+ <param name="-apiname" value="${ant.project.name} ${version}"/>
+ </doclet>
+ <packageset dir="src/java"/>
+ <classpath >
+ <path refid="classpath" />
+ <path refid="jdiff-classpath" />
+ <pathelement path="${java.class.path}"/>
+ </classpath>
+ </javadoc>
+ </target>
+
+ <target name="write-null">
+ <exec executable="touch">
+ <arg value="${jdiff.home}/Null.java"/>
+ </exec>
+ </target>
+
+ <target name="api-report" depends="ivy-retrieve-jdiff,api-xml">
+ <mkdir dir="${jdiff.build.dir}"/>
+ <javadoc sourcepath="src/java"
+ destdir="${jdiff.build.dir}"
+ sourceFiles="${jdiff.home}/Null.java"
+ maxmemory="${javadoc.maxmemory}">
+ <doclet name="jdiff.JDiff"
+ path="${jdiff.jar}:${xerces.jar}">
+ <param name="-oldapi" value="${ant.project.name} ${jdiff.stable}"/>
+ <param name="-newapi" value="${ant.project.name} ${version}"/>
+ <param name="-oldapidir" value="${jdiff.xml.dir}"/>
+ <param name="-newapidir" value="${jdiff.xml.dir}"/>
+ <param name="-javadocold" value="${jdiff.stable.javadoc}"/>
+ <param name="-javadocnew" value="../../api/"/>
+ <param name="-stats"/>
+ </doclet>
+ <classpath >
+ <path refid="classpath" />
+ <path refid="jdiff-classpath"/>
+ <pathelement path="${java.class.path}"/>
+ </classpath>
+ </javadoc>
+ </target>
+
+ <target name="changes-to-html" description="Convert CHANGES.txt into an html file">
+ <mkdir dir="${build.docs}"/>
+ <exec executable="perl" input="CHANGES.txt" output="${build.docs}/changes.html" failonerror="true">
+ <arg value="${changes.src}/changes2html.pl"/>
+ </exec>
+ <copy todir="${build.docs}">
+ <fileset dir="${changes.src}" includes="*.css"/>
+ </copy>
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- D I S T R I B U T I O N -->
+ <!-- ================================================================== -->
+ <!-- -->
+ <!-- ================================================================== -->
+ <target name="package" depends="compile, jar, javadoc, docs, api-report, jar-test, ant-tasks"
+ description="Build distribution">
+ <mkdir dir="${dist.dir}"/>
+ <mkdir dir="${dist.dir}/lib"/>
+ <mkdir dir="${dist.dir}/contrib"/>
+ <mkdir dir="${dist.dir}/bin"/>
+ <mkdir dir="${dist.dir}/docs"/>
+ <mkdir dir="${dist.dir}/docs/api"/>
+ <mkdir dir="${dist.dir}/docs/jdiff"/>
+
+ <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
+ <fileset dir="${common.ivy.lib.dir}"/>
+ </copy>
+
+ <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
+ <fileset dir="lib">
+ <exclude name="**/native/**"/>
+ </fileset>
+ </copy>
+
+ <exec dir="${dist.dir}" executable="sh" failonerror="true">
+ <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
+ <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
+ <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
+ <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
+ </exec>
+
+ <subant target="package">
+ <!--Pass down the version in case it's needed again and the target
+ distribution directory so contribs know where to install to.-->
+ <property name="version" value="${version}"/>
+ <property name="dist.dir" value="${dist.dir}"/>
+ <fileset file="${contrib.dir}/build.xml"/>
+ </subant>
+
+ <copy todir="${dist.dir}/webapps">
+ <fileset dir="${build.webapps}"/>
+ </copy>
+
+ <copy todir="${dist.dir}">
+ <fileset file="${build.dir}/${final.name}-*.jar"/>
+ </copy>
+
+ <copy todir="${dist.dir}/bin">
+ <fileset dir="bin"/>
+ </copy>
+
+ <copy todir="${dist.dir}/conf">
+ <fileset dir="${conf.dir}" excludes="**/*.template"/>
+ </copy>
+
+ <copy todir="${dist.dir}/docs">
+ <fileset dir="${build.docs}"/>
+ </copy>
+
+ <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>
+
+ <copy todir="${dist.dir}/ivy">
+ <fileset dir="ivy"/>
+ </copy>
+
+ <copy todir="${dist.dir}">
+ <fileset dir=".">
+ <include name="*.txt" />
+ </fileset>
+ </copy>
+
+ <copy todir="${dist.dir}/src" includeEmptyDirs="true">
+ <fileset dir="src" excludes="**/*.template **/docs/build/**/*"/>
+ </copy>
+
+ <copy todir="${dist.dir}/" file="build.xml"/>
+
+ <chmod perm="ugo+x" type="file" parallel="false">
+ <fileset dir="${dist.dir}/src/contrib/">
+ <include name="*/bin/*" />
+ </fileset>
+ <fileset dir="${dist.dir}/src/contrib/ec2/bin/image"/>
+ </chmod>
+
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- Make release tarball -->
+ <!-- ================================================================== -->
+ <target name="tar" depends="package" description="Make release tarball">
+ <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
+ <param.listofitems>
+ <tarfileset dir="${build.dir}" mode="664">
+ <exclude name="${final.name}/bin/*" />
+ <exclude name="${final.name}/contrib/*/bin/*" />
+ <exclude name="${final.name}/src/contrib/ec2/bin/*" />
+ <exclude name="${final.name}/src/contrib/ec2/bin/image/*" />
+ <include name="${final.name}/**" />
+ </tarfileset>
+ <tarfileset dir="${build.dir}" mode="755">
+ <include name="${final.name}/bin/*" />
+ <include name="${final.name}/contrib/*/bin/*" />
+ <include name="${final.name}/src/contrib/ec2/bin/*" />
+ <include name="${final.name}/src/contrib/ec2/bin/image/*" />
+ </tarfileset>
+ </param.listofitems>
+ </macro_tar>
+ </target>
+
+ <target name="bin-package" depends="compile, jar, jar-test, ant-tasks"
+ description="assembles artifacts for binary target">
+ <mkdir dir="${dist.dir}"/>
+ <mkdir dir="${dist.dir}/lib"/>
+ <mkdir dir="${dist.dir}/contrib"/>
+ <mkdir dir="${dist.dir}/bin"/>
+
+ <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
+ <fileset dir="${common.ivy.lib.dir}"/>
+ </copy>
+
+ <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
+ <fileset dir="lib">
+ <exclude name="**/native/**"/>
+ </fileset>
+ </copy>
+
+ <exec dir="${dist.dir}" executable="sh" failonerror="true">
+ <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
+ <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
+ <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
+ <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
+ </exec>
+
+ <subant target="package">
+ <!--Pass down the version in case it's needed again and the target
+ distribution directory so contribs know where to install to.-->
+ <property name="version" value="${version}"/>
+ <property name="dist.dir" value="${dist.dir}"/>
+ <fileset file="${contrib.dir}/build.xml"/>
+ </subant>
+
+ <copy todir="${dist.dir}/webapps">
+ <fileset dir="${build.webapps}"/>
+ </copy>
+
+ <copy todir="${dist.dir}">
+ <fileset file="${build.dir}/${final.name}-*.jar"/>
+ </copy>
+
+ <copy todir="${dist.dir}/bin">
+ <fileset dir="bin"/>
+ </copy>
+
+ <copy todir="${dist.dir}/conf">
+ <fileset dir="${conf.dir}" excludes="**/*.template"/>
+ </copy>
+
+ <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>
+
+ <copy todir="${dist.dir}/ivy">
+ <fileset dir="ivy"/>
+ </copy>
+
+ <copy todir="${dist.dir}">
+ <fileset dir=".">
+ <include name="*.txt" />
+ </fileset>
+ </copy>
+
+ <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
+ <fileset dir="${build.dir}/c++"/>
+ </copy>
+
+ <copy todir="${dist.dir}/" file="build.xml"/>
+
+ <chmod perm="ugo+x" type="file" parallel="false">
+ <fileset dir="${dist.dir}/bin"/>
+ </chmod>
+ </target>
+
+ <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
+ <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
+ <param.listofitems>
+ <tarfileset dir="${build.dir}" mode="664">
+ <exclude name="${final.name}/bin/*" />
+ <exclude name="${final.name}/src/**" />
+ <exclude name="${final.name}/docs/**" />
+ <include name="${final.name}/**" />
+ </tarfileset>
+ <tarfileset dir="${build.dir}" mode="755">
+ <include name="${final.name}/bin/*" />
+ </tarfileset>
+ </param.listofitems>
+ </macro_tar>
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- Perform audit activities for the release -->
+ <!-- ================================================================== -->
+ <target name="releaseaudit" depends="package,ivy-retrieve-releaseaudit" description="Release Audit activities">
+ <fail unless="rat.present" message="Failed to load class [${rat.reporting.classname}]."/>
+ <java classname="${rat.reporting.classname}" fork="true">
+ <classpath refid="releaseaudit-classpath"/>
+ <arg value="${build.dir}/${final.name}"/>
+ </java>
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- Clean. Delete the build files, and their directories -->
+ <!-- ================================================================== -->
+ <target name="clean" depends="clean-contrib" description="Clean. Delete the build files, and their directories">
+ <delete dir="${build.dir}"/>
+ <delete dir="${docs.src}/build"/>
+ <delete dir="${src.docs.cn}/build"/>
+ </target>
+
+ <!-- ================================================================== -->
+ <!-- Clean contrib target. For now, must be called explicitly -->
+ <!-- Using subant instead of ant as a workaround for 30569 -->
+ <!-- ================================================================== -->
+ <target name="clean-contrib">
+ <subant target="clean">
+ <fileset file="src/contrib/build.xml"/>
+ </subant>
+ </target>
+
+ <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core" if="islibhdfs">
+ <delete dir="${test.libhdfs.dir}"/>
+ <mkdir dir="${test.libhdfs.dir}"/>
+ <mkdir dir="${test.libhdfs.dir}/logs"/>
+ <mkdir dir="${test.libhdfs.dir}/hdfs/name"/>
+
+ <exec dir="${build.c++.libhdfs}" executable="${make.cmd}" failonerror="true">
+ <env key="OS_NAME" value="${os.name}"/>
+ <env key="OS_ARCH" value="${os.arch}"/>
+ <env key="JVM_ARCH" value="${jvm.arch}"/>
+ <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
+ <env key="HADOOP_HOME" value="${basedir}"/>
+ <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/>
+ <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
+ <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/>
+ <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>
+ <env key="LIB_DIR" value="${common.ivy.lib.dir}"/>
+ <arg value="test"/>
+ </exec>
+ </target>
+
+<!-- ================================================================== -->
+<!-- librecordio targets. -->
+<!-- ================================================================== -->
+
+ <target name="compile-librecordio" depends="init" if="librecordio" >
+ <mkdir dir="${build.librecordio}"/>
+ <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true">
+ <env key="XERCESCROOT" value="${xercescroot}"/>
+ <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
+ </exec>
+ </target>
+
+ <target name="test-librecordio" depends="compile-librecordio, compile-core" if="librecordio">
+ <delete dir="${librecordio.test.dir}"/>
+ <mkdir dir="${librecordio.test.dir}"/>
+ <exec dir="${librecordio.src}/test" executable="${make.cmd}" failonerror="true">
+ <env key="HADOOP_HOME" value="${basedir}"/>
+ <env key="XERCESCROOT" value="${xercescroot}"/>
+ <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
+ <env key="LIBRECORDIO_TEST_DIR" value="${librecordio.test.dir}"/>
+ <arg value="all"/>
+ </exec>
+ </target>
+
+ <target name="package-librecordio" depends="compile-librecordio" if="librecordio">
+ <mkdir dir="${dist.dir}/librecordio"/>
+ <copy todir="${dist.dir}/librecordio">
+ <fileset dir="${build.librecordio}" casesensitive="yes" followsymlinks="false">
+ <exclude name="**/tests/**"/>
+ <exclude name="*.so"/>
+ <exclude name="*.o"/>
+ </fileset>
+ </copy>
+ <chmod perm="ugo+x" type="file">
+ <fileset dir="${dist.dir}/librecordio"/>
+ </chmod>
+ </target>
+
+ <target name="create-c++-configure" depends="init" if="compile.c++">
+ <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
+ failonerror="yes">
+ <arg value="-if"/>
+ </exec>
+ <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes"
+ failonerror="yes">
+ <arg value="-if"/>
+ </exec>
+ <exec executable="autoreconf" dir="${c++.examples.pipes.src}"
+ searchpath="yes" failonerror="yes">
+ <arg value="-if"/>
+ </exec>
+ <antcall target="create-c++-configure-libhdfs"/>
+ </target>
+
+ <target name="create-c++-configure-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
+ <exec executable="autoreconf" dir="${c++.libhdfs.src}"
+ searchpath="yes" failonerror="yes">
+ <arg value="-if"/>
+ </exec>
+ </target>
+
+ <target name="check-c++-makefiles" depends="init" if="compile.c++">
+ <condition property="need.c++.utils.makefile">
+ <not> <available file="${build.c++.utils}/Makefile"/> </not>
+ </condition>
+ <condition property="need.c++.pipes.makefile">
+ <not> <available file="${build.c++.pipes}/Makefile"/> </not>
+ </condition>
+ <condition property="need.c++.examples.pipes.makefile">
+ <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not>
+ </condition>
+ </target>
+
+ <target name="check-c++-libhdfs">
+ <condition property="islibhdfs">
+ <and>
+ <isset property="compile.c++"/>
+ <isset property="libhdfs"/>
+ </and>
+ </condition>
+ </target>
+
+ <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
+ <condition property="need.c++.libhdfs.makefile">
+ <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
+ </condition>
+ </target>
+
+ <target name="create-c++-libhdfs-makefile" depends="check-c++-makefile-libhdfs"
+ if="need.c++.libhdfs.makefile">
+ <mkdir dir="${build.c++.libhdfs}"/>
+ <chmod file="${c++.libhdfs.src}/configure" perm="ugo+x"/>
+ <exec executable="${c++.libhdfs.src}/configure" dir="${build.c++.libhdfs}"
+ failonerror="yes">
+ <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
+ <env key="JVM_ARCH" value="${jvm.arch}"/>
+ <arg value="--prefix=${install.c++}"/>
+ </exec>
+ </target>
+
+ <target name="create-c++-utils-makefile" depends="check-c++-makefiles"
+ if="need.c++.utils.makefile">
+ <mkdir dir="${build.c++.utils}"/>
+ <chmod file="${c++.utils.src}/configure" perm="ugo+x"/>
+ <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}"
+ failonerror="yes">
+ <arg value="--prefix=${install.c++}"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++-utils" depends="create-c++-utils-makefile"
+ if="compile.c++">
+ <exec executable="${make.cmd}" dir="${build.c++.utils}" searchpath="yes"
+ failonerror="yes">
+ <arg value="install"/>
+ </exec>
+ </target>
+
+ <target name="create-c++-pipes-makefile" depends="check-c++-makefiles"
+ if="need.c++.pipes.makefile">
+ <mkdir dir="${build.c++.pipes}"/>
+ <chmod file="${c++.pipes.src}/configure" perm="ugo+x"/>
+ <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
+ failonerror="yes">
+ <arg value="--prefix=${install.c++}"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++-pipes"
+ depends="create-c++-pipes-makefile,compile-c++-utils"
+ if="compile.c++">
+ <exec executable="${make.cmd}" dir="${build.c++.pipes}" searchpath="yes"
+ failonerror="yes">
+ <arg value="install"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++"
+ depends="compile-c++-pipes"/>
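+
+ <!-- The C++ targets are all conditional on the compile.c++ property, so an
+ illustrative invocation is simply
+ ant compile-c++ -Dcompile.c++=true
+ -->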
+
+ <target name="create-c++-examples-pipes-makefile"
+ depends="check-c++-makefiles"
+ if="need.c++.examples.pipes.makefile">
+ <mkdir dir="${build.c++.examples.pipes}"/>
+ <chmod file="${c++.examples.pipes.src}/configure" perm="ugo+x"/>
+ <exec executable="${c++.examples.pipes.src}/configure"
+ dir="${build.c++.examples.pipes}"
+ failonerror="yes">
+ <arg value="--prefix=${install.c++.examples}"/>
+ <arg value="--with-hadoop-utils=${install.c++}"/>
+ <arg value="--with-hadoop-pipes=${install.c++}"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++-examples-pipes"
+ depends="create-c++-examples-pipes-makefile,compile-c++-pipes"
+ if="compile.c++">
+ <exec executable="${make.cmd}" dir="${build.c++.examples.pipes}" searchpath="yes"
+ failonerror="yes">
+ <arg value="install"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++-examples"
+ depends="compile-c++-examples-pipes"/>
+
+ <target name="compile-c++-libhdfs" depends="create-c++-libhdfs-makefile" if="islibhdfs">
+ <exec executable="${make.cmd}" dir="${build.c++.libhdfs}" searchpath="yes"
+ failonerror="yes">
+ <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
+ <env key="JVM_ARCH" value="${jvm.arch}"/>
+ <arg value="install"/>
+ </exec>
+ </target>
+
+ <target name="compile-ant-tasks" depends="compile-core">
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${anttasks.dir}"
+ includes="org/apache/hadoop/ant/**/*.java"
+ destdir="${build.anttasks}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args}"/>
+ <classpath refid="classpath"/>
+ </javac>
+ </target>
+
+ <target name="ant-tasks" depends="jar, compile-ant-tasks">
+ <copy file="${anttasks.dir}/org/apache/hadoop/ant/antlib.xml"
+ todir="${build.anttasks}/org/apache/hadoop/ant"/>
+ <jar destfile="${build.dir}/${final.name}-ant.jar">
+ <fileset dir="${build.anttasks}"/>
+ </jar>
+ </target>
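+
+ <!-- A hedged consumption sketch: once built, the ant tasks jar could be
+ used from another build roughly as follows (the resource path follows
+ from the antlib.xml copy above):
+ <typedef resource="org/apache/hadoop/ant/antlib.xml"
+ classpath="${build.dir}/${final.name}-ant.jar"/>
+ -->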
+
+ <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. To use, specify -Dclover.home=<base of clover installation> -Drun.clover=true on the command line."/>
+
+<target name="clover.setup" if="clover.enabled">
+ <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
+ <mkdir dir="${clover.db.dir}"/>
+ <clover-setup initString="${clover.db.dir}/hadoop_coverage.db">
+ <fileset dir="src" includes="core/**/* tools/**/* hdfs/**/* mapred/**/*"/>
+ </clover-setup>
+ </target>
+
+<target name="clover.info" unless="clover.present">
+ <echo>
+ Clover not found. Code coverage reports disabled.
+ </echo>
+ </target>
+
+<target name="clover.check">
+ <fail unless="clover.present">
+ ##################################################################
+ Clover not found.
+ Please specify -Dclover.home=<base of clover installation>
+ on the command line.
+ ##################################################################
+ </fail>
+</target>
+
+<target name="generate-clover-reports" depends="clover.check, clover">
+ <mkdir dir="${clover.report.dir}"/>
+ <clover-report>
+ <current outfile="${clover.report.dir}" title="${final.name}">
+ <format type="html"/>
+ </current>
+ </clover-report>
+ <clover-report>
+ <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
+ <format type="xml"/>
+ </current>
+ </clover-report>
+ </target>
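+
+ <!-- An illustrative invocation (the clover.home path is a placeholder):
+ ant generate-clover-reports -Dclover.home=/opt/clover -Drun.clover=true
+ -->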
+
+<target name="findbugs.check" depends="check-for-findbugs" unless="findbugs.present">
+ <fail message="'findbugs.home' is not defined. Please pass -Dfindbugs.home=<base of Findbugs installation> to Ant on the command-line." />
+</target>
+
+<target name="patch.check" unless="patch.file">
+ <fail message="'patch.file' is not defined. Please pass -Dpatch.file=<location of patch file> to Ant on the command-line." />
+</target>
+
+<target name="test-patch" depends="patch.check,findbugs.check,forrest.check">
+ <exec executable="bash" failonerror="true">
+ <arg value="${basedir}/src/test/bin/test-patch.sh"/>
+ <arg value="DEVELOPER"/>
+ <arg value="${patch.file}"/>
+ <arg value="${scratch.dir}"/>
+ <arg value="${svn.cmd}"/>
+ <arg value="${grep.cmd}"/>
+ <arg value="${patch.cmd}"/>
+ <arg value="${findbugs.home}"/>
+ <arg value="${forrest.home}"/>
+ <arg value="${basedir}"/>
+ <arg value="${java5.home}"/>
+ </exec>
+ </target>
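+
+ <!-- An illustrative invocation (paths are placeholders; at minimum the
+ properties validated by patch.check and findbugs.check must be set):
+ ant test-patch -Dpatch.file=/tmp/HADOOP-NNNN.patch
+ -Dfindbugs.home=/opt/findbugs -Dforrest.home=/opt/forrest
+ -->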
+
+<target name="hudson-test-patch" depends="findbugs.check,forrest.check">
+ <exec executable="bash" failonerror="true">
+ <arg value="${basedir}/src/test/bin/test-patch.sh"/>
+ <arg value="HUDSON"/>
+ <arg value="${scratch.dir}"/>
+ <arg value="${support.dir}"/>
+ <arg value="${ps.cmd}"/>
+ <arg value="${wget.cmd}"/>
+ <arg value="${jiracli.cmd}"/>
+ <arg value="${svn.cmd}"/>
+ <arg value="${grep.cmd}"/>
+ <arg value="${patch.cmd}"/>
+ <arg value="${findbugs.home}"/>
+ <arg value="${forrest.home}"/>
+ <arg value="${eclipse.home}"/>
+ <arg value="${python.home}"/>
+ <arg value="${basedir}"/>
+ <arg value="${trigger.url}"/>
+ <arg value="${jira.passwd}"/>
+ <arg value="${java5.home}"/>
+ <arg value="${curl.cmd}"/>
+ <arg value="${defect}"/>
+ </exec>
+ </target>
+
+ <target name="eclipse-files" depends="init"
+ description="Generate files for Eclipse">
+ <pathconvert property="eclipse.project">
+ <path path="${basedir}"/>
+ <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
+ </pathconvert>
+ <copy todir="." overwrite="true">
+ <fileset dir=".eclipse.templates">
+ <exclude name="**/README.txt"/>
+ </fileset>
+ <filterset>
+ <filter token="PROJECT" value="${eclipse.project}"/>
+ </filterset>
+ </copy>
+ </target>
+
+ <target name="ivy-init-dirs">
+ <mkdir dir="${build.ivy.dir}" />
+ <mkdir dir="${build.ivy.lib.dir}" />
+ <mkdir dir="${build.ivy.report.dir}" />
+ <mkdir dir="${build.ivy.maven.dir}" />
+ </target>
+
+ <target name="ivy-probe-antlib" >
+ <condition property="ivy.found">
+ <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
+ </condition>
+ </target>
+
+ <target name="ivy-download" description="To download ivy" unless="offline">
+ <get src="${ivy_repo_url}" dest="${ivy.jar}" usetimestamp="true"/>
+ </target>
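+
+ <!-- Note: setting the "offline" property skips this download, so a build
+ without network access can still run if ${ivy.jar} is already present,
+ e.g. ant compile -Doffline=true
+ -->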
+
+ <!--
+ To avoid Ivy leaking across large projects, Ivy is always loaded in the same classloader.
+ Note that loading is skipped when Ivy is already present, so repeated initialisation is harmless.
+ -->
+ <target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
+ <typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
+ loaderRef="ivyLoader">
+ <classpath>
+ <pathelement location="${ivy.jar}"/>
+ </classpath>
+ </typedef>
+ <fail>
+ <condition>
+ <not>
+ <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
+ </not>
+ </condition>
+ You need Apache Ivy 2.0 or later from http://ant.apache.org/
+ It could not be loaded from ${ivy_repo_url}
+ </fail>
+ </target>
+
+
+ <target name="ivy-init" depends="ivy-init-antlib" >
+
+ <!-- Configure Ivy by reading in the settings file.
+ If a settings file has already been read into this settings ID, that one takes priority.
+ -->
+ <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'/>
+ </target>
+
+ <target name="ivy-resolve" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"/>
+ </target>
+
+ <target name="ivy-resolve-javadoc" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"/>
+ </target>
+
+ <target name="ivy-resolve-releaseaudit" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"/>
+ </target>
+
+ <target name="ivy-resolve-test" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test" />
+ </target>
+
+ <target name="ivy-resolve-common" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" />
+ </target>
+
+ <target name="ivy-resolve-jdiff" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff" />
+ </target>
+
+ <target name="ivy-resolve-checkstyle" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"/>
+ </target>
+
+ <target name="ivy-retrieve" depends="ivy-resolve"
+ description="Retrieve Ivy-managed artifacts">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+ </target>
+
+ <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle"
+ description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+ <ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/>
+ </target>
+
+ <target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff"
+ description="Retrieve Ivy-managed artifacts for the javadoc configurations">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+ <ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/>
+ </target>
+
+ <target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc"
+ description="Retrieve Ivy-managed artifacts for the javadoc configurations">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+ <ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/>
+ </target>
+
+ <target name="ivy-retrieve-test" depends="ivy-resolve-test"
+ description="Retrieve Ivy-managed artifacts for the test configurations">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+ <ivy:cachepath pathid="ivy.test.classpath" conf="test"/>
+ </target>
+
+ <target name="ivy-retrieve-common" depends="ivy-resolve-common"
+ description="Retrieve Ivy-managed artifacts for the compile configurations">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+ <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
+ </target>
+
+ <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
+ description="Retrieve Ivy-managed artifacts for the compile configurations">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" />
+ <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
+ <available classname="${rat.reporting.classname}"
+ classpathref="releaseaudit-classpath" property="rat.present" value="true"/>
+ </target>
+
+ <target name="ivy-report" depends="ivy-resolve-releaseaudit"
+ description="Generate">
+ <ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
+ <echo>
+ Reports generated:${build.ivy.report.dir}
+ </echo>
+ </target>
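+
+ <!-- An illustrative invocation; HTML reports are written under
+ ${build.ivy.report.dir}:
+ ant ivy-report
+ -->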
+
+ <target name="assert-hadoop-jar-exists" depends="ivy-init">
+ <fail>
+ <condition>
+ <not>
+ <available file="${hadoop.jar}" />
+ </not>
+ </condition>
+ Not found: ${hadoop.jar}
+ Please run the target "jar" in the main build file
+ </fail>
+
+ </target>
+
+ <target name="ready-to-publish" depends="jar,assert-hadoop-jar-exists,ivy-resolve"/>
+
+ <target name="ivy-publish-local" depends="ready-to-publish,ivy-resolve">
+ <ivy:publish
+ settingsRef="${ant.project.name}.ivy.settings"
+ resolver="local"
+ pubrevision="${version}"
+ overwrite="true"
+ artifactspattern="${build.dir}/${ivy.publish.pattern}" />
+ </target>
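+
+ <!-- A hedged sketch of consuming the locally published module from another
+ Ivy build (the org value is an assumption, and the consumer must use an
+ ivysettings with the same "local" resolver):
+ <dependency org="org.apache.hadoop" name="hadoop-hdfs" rev="0.21.0-dev"/>
+ -->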
+
+
+ <!-- This target exists to exercise the makepom task. How well it works
+ depends on whether transitive dependencies should be excluded or not.
+ -->
+ <target name="makepom" depends="ivy-resolve">
+ <ivy:makepom settingsRef="${ant.project.name}.ivy.settings"
+ ivyfile="ivy.xml"
+ pomfile="${build.ivy.maven.dir}/generated.pom">
+ <ivy:mapping conf="default" scope="default"/>
+ <ivy:mapping conf="master" scope="master"/>
+ <ivy:mapping conf="runtime" scope="runtime"/>
+ </ivy:makepom>
+ </target>
+
+
+ <target name="copy-jar-to-maven" depends="ready-to-publish">
+ <copy file="${hadoop.jar}"
+ tofile="${build.ivy.maven.jar}"/>
+ <checksum file="${build.ivy.maven.jar}" algorithm="md5"/>
+ </target>
+
+ <target name="copypom" depends="ivy-init-dirs">
+
+ <presetdef name="expandingcopy" >
+ <copy overwrite="true">
+ <filterchain>
+ <expandproperties/>
+ </filterchain>
+ </copy>
+ </presetdef>
+
+ <expandingcopy file="ivy/hadoop-core.pom"
+ tofile="${build.ivy.maven.pom}"/>
+ <checksum file="${build.ivy.maven.pom}" algorithm="md5"/>
+ </target>
+
+ <target name="maven-artifacts" depends="copy-jar-to-maven,copypom" />
+
+ <target name="published" depends="ivy-publish-local,maven-artifacts">
+
+ </target>
+
+ <!-- taskcontroller targets -->
+ <target name="init-task-controller-build">
+ <mkdir dir="${build.c++.task-controller}" />
+ <copy todir="${build.c++.task-controller}">
+ <fileset dir="${c++.task-controller.src}" includes="*.c">
+ </fileset>
+ <fileset dir="${c++.task-controller.src}" includes="*.h">
+ </fileset>
+ </copy>
+ <chmod file="${c++.task-controller.src}/configure" perm="ugo+x"/>
+ <condition property="task-controller.conf.dir.passed">
+ <not>
+ <equals arg1="${hadoop.conf.dir}" arg2="$${hadoop.conf.dir}"/>
+ </not>
+ </condition>
+ </target>
+ <target name="configure-task-controller" depends="init,
+ init-task-controller-build,
+ task-controller-configuration-with-confdir,
+ task-controller-configuration-with-no-confdir">
+ </target>
+ <target name="task-controller-configuration-with-confdir"
+ if="task-controller.conf.dir.passed" >
+ <exec executable="${c++.task-controller.src}/configure"
+ dir="${build.c++.task-controller}" failonerror="yes">
+ <arg value="--prefix=${task-controller.install.dir}" />
+ <arg value="--with-confdir=${hadoop.conf.dir}" />
+ </exec>
+ </target>
+ <target name="task-controller-configuration-with-no-confdir"
+ unless="task-controller.conf.dir.passed">
+ <exec executable="${c++.task-controller.src}/configure"
+ dir="${build.c++.task-controller}" failonerror="yes">
+ <arg value="--prefix=${task-controller.install.dir}" />
+ </exec>
+ </target>
+ <!--
+ * Create the installation directory.
+ * Do a make install.
+ -->
+ <target name="task-controller" depends="configure-task-controller">
+ <mkdir dir="${task-controller.install.dir}" />
+ <exec executable="${make.cmd}" dir="${build.c++.task-controller}"
+ searchpath="yes" failonerror="yes">
+ <arg value="install" />
+ </exec>
+ </target>
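+ <!-- An illustrative invocation (the conf dir is a placeholder); omitting
+ hadoop.conf.dir falls through to the no-confdir configure path:
+ ant task-controller -Dhadoop.conf.dir=/etc/hadoop
+ -->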
+ <!-- end of task-controller target -->
+</project>