Posted to common-commits@hadoop.apache.org by st...@apache.org on 2009/11/28 20:53:40 UTC

svn commit: r885142 [1/6] - in /hadoop/common/branches/HADOOP-6194: ./ .eclipse.templates/ bin/ ivy/ lib/jdiff/ src/ src/contrib/ src/contrib/ec2/ src/docs/ src/docs/src/documentation/ src/docs/src/documentation/content/xdocs/ src/docs/src/documentatio...

Author: stevel
Date: Sat Nov 28 19:53:33 2009
New Revision: 885142

URL: http://svn.apache.org/viewvc?rev=885142&view=rev
Log:
HADOOP-6194 HDFS-326 resync with trunk.

Added:
    hadoop/common/branches/HADOOP-6194/ivy/hadoop-core-template.xml
      - copied unchanged from r884903, hadoop/common/trunk/ivy/hadoop-core-template.xml
    hadoop/common/branches/HADOOP-6194/ivy/hadoop-core-test-template.xml
      - copied unchanged from r884903, hadoop/common/trunk/ivy/hadoop-core-test-template.xml
    hadoop/common/branches/HADOOP-6194/lib/jdiff/hadoop_0.20.1.xml
      - copied unchanged from r884903, hadoop/common/trunk/lib/jdiff/hadoop_0.20.1.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/file_system_shell.xml
      - copied unchanged from r884903, hadoop/common/trunk/src/docs/src/documentation/content/xdocs/file_system_shell.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/single_node_setup.xml
      - copied unchanged from r884903, hadoop/common/trunk/src/docs/src/documentation/content/xdocs/single_node_setup.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/resources/images/common-logo.jpg
      - copied unchanged from r884903, hadoop/common/trunk/src/docs/src/documentation/resources/images/common-logo.jpg
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/classification/
      - copied from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/classification/
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/classification/InterfaceAudience.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/classification/InterfaceAudience.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/classification/InterfaceStability.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/classification/InterfaceStability.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/AbstractFileSystem.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/AbstractFileSystem.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/ChecksumFs.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/ChecksumFs.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/DelegateToFileSystem.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/DelegateToFileSystem.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FileAlreadyExistsException.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileAlreadyExistsException.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FileContext.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileContext.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FilterFs.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/FilterFs.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FsConfig.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsConfig.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FsConstants.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsConstants.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FsServerDefaults.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsServerDefaults.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/LocalFileSystemConfigKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/LocalFileSystemConfigKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/Options.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/Options.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/ParentNotDirectoryException.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/ParentNotDirectoryException.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/ftp/FTPFileSystemConfigKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/ftp/FTPFileSystemConfigKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/ftp/FtpConfigKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/ftp/FtpFs.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/ftp/FtpFs.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/kfs/KFSConfigKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/kfs/KFSConfigKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/local/
      - copied from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/local/
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/local/LocalConfigKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/local/LocalConfigKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/local/LocalFs.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/local/LocalFs.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/local/RawLocalFs.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/local/RawLocalFs.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/ChmodParser.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/permission/ChmodParser.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/PermissionParser.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/permission/PermissionParser.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/UmaskParser.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/permission/UmaskParser.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/s3/S3FileSystemConfigKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/s3/S3FileSystemConfigKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/s3native/S3NativeFileSystemConfigKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/fs/s3native/S3NativeFileSystemConfigKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/http/HtmlQuoting.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/http/HtmlQuoting.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/SplitCompressionInputStream.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplitCompressionInputStream.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/DeserializerBase.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/DeserializerBase.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/LegacyDeserializer.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/LegacyDeserializer.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/LegacySerialization.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/LegacySerialization.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/LegacySerializer.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/LegacySerializer.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/SerializationBase.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/SerializationBase.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/SerializerBase.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/SerializerBase.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/avro/AvroGenericSerialization.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroGenericSerialization.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/ipc/AvroRpc.java
      - copied unchanged from r884903, hadoop/common/trunk/src/java/org/apache/hadoop/ipc/AvroRpc.java
    hadoop/common/branches/HADOOP-6194/src/test/aop/
      - copied from r884903, hadoop/common/trunk/src/test/aop/
    hadoop/common/branches/HADOOP-6194/src/test/aop/build/
      - copied from r884903, hadoop/common/trunk/src/test/aop/build/
    hadoop/common/branches/HADOOP-6194/src/test/aop/build/aop.xml
      - copied unchanged from r884903, hadoop/common/trunk/src/test/aop/build/aop.xml
    hadoop/common/branches/HADOOP-6194/src/test/aop/org/
      - copied from r884903, hadoop/common/trunk/src/test/aop/org/
    hadoop/common/branches/HADOOP-6194/src/test/aop/org/apache/
      - copied from r884903, hadoop/common/trunk/src/test/aop/org/apache/
    hadoop/common/branches/HADOOP-6194/src/test/aop/org/apache/hadoop/
      - copied from r884903, hadoop/common/trunk/src/test/aop/org/apache/hadoop/
    hadoop/common/branches/HADOOP-6194/src/test/aop/org/apache/hadoop/fi/
      - copied from r884903, hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/
    hadoop/common/branches/HADOOP-6194/src/test/aop/org/apache/hadoop/fi/FiConfig.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java
    hadoop/common/branches/HADOOP-6194/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/conf/TestConfigurationDeprecation.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/conf/TestConfigurationDeprecation.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/conf/TestDeprecatedKeys.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/conf/TestDeprecatedKeys.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/FileContextCreateMkdirBaseTest.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextCreateMkdirBaseTest.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/FileContextPermissionBase.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextPermissionBase.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/FileContextURIBase.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/FileContextURIBase.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestFcLocalFsPermission.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestFcLocalFsPermission.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestFileSystemCaching.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestFileSystemCaching.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/http/TestHtmlQuoting.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/http/TestHtmlQuoting.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/http/TestHttpServer.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/http/TestHttpServer.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/TestSequenceFileSync.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/io/TestSequenceFileSync.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/AvroTestProtocol.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/AvroTestProtocol.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestGenericOptionsParser.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/util/TestGenericOptionsParser.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestHostsFileReader.java
      - copied unchanged from r884903, hadoop/common/trunk/src/test/core/org/apache/hadoop/util/TestHostsFileReader.java
    hadoop/common/branches/HADOOP-6194/src/test/fi-site.xml
      - copied unchanged from r884903, hadoop/common/trunk/src/test/fi-site.xml
Removed:
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/SLG_user_guide.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/commands_manual.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/distcp.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/fair_scheduler.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hadoop_archives.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hdfs_design.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hdfs_quota_admin_guide.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hdfs_shell.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hdfs_user_guide.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hod_admin_guide.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hod_config_guide.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hod_user_guide.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/quickstart.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/vaidya.xml
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/file/tfile/BoundedByteArrayOutputStream.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/AccessKey.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/AccessToken.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/AccessTokenHandler.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/ExportedAccessKeys.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/InvalidAccessTokenException.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/LinuxMemoryCalculatorPlugin.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/MemoryCalculatorPlugin.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/ProcessTree.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/ProcfsBasedProcessTree.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/SecurityTestUtil.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/TestAccessToken.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestProcfsBasedProcessTree.java
Modified:
    hadoop/common/branches/HADOOP-6194/   (props changed)
    hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath
    hadoop/common/branches/HADOOP-6194/.gitignore
    hadoop/common/branches/HADOOP-6194/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HADOOP-6194/bin/hadoop-config.sh
    hadoop/common/branches/HADOOP-6194/build.xml
    hadoop/common/branches/HADOOP-6194/ivy/   (props changed)
    hadoop/common/branches/HADOOP-6194/ivy.xml
    hadoop/common/branches/HADOOP-6194/ivy/ivysettings.xml
    hadoop/common/branches/HADOOP-6194/ivy/libraries.properties
    hadoop/common/branches/HADOOP-6194/ivybuild.xml
    hadoop/common/branches/HADOOP-6194/src/contrib/build-contrib.xml
    hadoop/common/branches/HADOOP-6194/src/contrib/ec2/   (props changed)
    hadoop/common/branches/HADOOP-6194/src/docs/   (props changed)
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/cluster_setup.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/hdfs_permissions_guide.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/index.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/native_libraries.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/service_level_auth.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/site.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/content/xdocs/tabs.xml
    hadoop/common/branches/HADOOP-6194/src/docs/src/documentation/skinconf.xml
    hadoop/common/branches/HADOOP-6194/src/java/   (props changed)
    hadoop/common/branches/HADOOP-6194/src/java/core-default.xml
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/DF.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FSDataOutputStream.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FSInputChecker.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FileStatus.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FileUtil.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FilterFileSystem.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FsShell.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/FsShellPermissions.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/HarFileSystem.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/Path.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/Syncable.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/Trash.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/FsPermission.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/http/FilterInitializer.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/DefaultStringifier.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/SequenceFile.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/WritableUtils.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/BZip2Codec.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/CodecPool.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/Compressor.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/DecompressorStream.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/GzipCodec.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/bzip2/BZip2Constants.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/bzip2/BZip2DummyCompressor.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/file/tfile/TFile.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/Deserializer.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/Serialization.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/Serializer.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/net/ScriptBasedMapping.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/GenericOptionsParser.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/HostsFileReader.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/NativeCodeLoader.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/PureJavaCrc32.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/ReflectionUtils.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/Service.java
    hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/util/Shell.java
    hadoop/common/branches/HADOOP-6194/src/saveVersion.sh
    hadoop/common/branches/HADOOP-6194/src/test/bin/test-patch.sh
    hadoop/common/branches/HADOOP-6194/src/test/core/   (props changed)
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/conf/TestConfiguration.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestLocalFileSystem.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/TestPath.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/permission/TestFsPermission.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/http/TestGlobalFilter.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/http/TestServletFilter.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/compress/TestCodec.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/file/tfile/TestTFile.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileSplit.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/SerializationTestUtil.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestGenericsUtil.java
    hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestPureJavaCrc32.java
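
The new org.apache.hadoop.classification sources added above (HADOOP-5073 in the CHANGES.txt diff below) carry the InterfaceAudience and InterfaceStability annotations used to mark the audience and stability of public APIs. A minimal sketch of how they might be applied follows; the annotated class and method are hypothetical and not part of this commit:

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/**
 * Hypothetical class showing how the classification annotations are applied.
 */
@InterfaceAudience.Public        // usable by any project or application
@InterfaceStability.Evolving     // API may change between minor releases
public class ExampleClassifiedApi {

  // LimitedPrivate narrows the intended audience to the named projects.
  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
  @InterfaceStability.Unstable
  public void internalHelper() {
    // no-op; exists only to carry the annotations in this sketch
  }
}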

Propchange: hadoop/common/branches/HADOOP-6194/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Sat Nov 28 19:53:33 2009
@@ -1,4 +1,6 @@
 build
+build-fi
+build.properties
 logs
 .classpath
 .externalToolBuilders

Propchange: hadoop/common/branches/HADOOP-6194/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Nov 28 19:53:33 2009
@@ -1,2 +1,2 @@
-/hadoop/common/trunk:804966-807681
+/hadoop/common/trunk:804966-884903
 /hadoop/core/branches/branch-0.19/core:713112

Modified: hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath?rev=885142&r1=885141&r2=885142&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath (original)
+++ hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath Sat Nov 28 19:53:33 2009
@@ -1,9 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <classpath>
 	<classpathentry kind="src" path="src/java"/>
+	<classpathentry kind="src" path="src/test/aop"/>
 	<classpathentry kind="src" path="src/test/core"/>
 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
 	<classpathentry kind="var" path="ANT_HOME/lib/ant.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/avro-1.2.0.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-cli-1.2.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-codec-1.3.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-el-1.0.jar"/>
@@ -12,10 +14,12 @@
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-logging-api-1.0.4.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/commons-net-1.4.1.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/core-3.1.1.jar"/>
+        <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jackson-core-asl-1.0.1.jar"/>
+        <classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jackson-mapper-asl-1.0.1.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/hsqldb-1.8.0.10.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jasper-compiler-5.5.12.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jasper-runtime-5.5.12.jar"/>
-	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jets3t-0.6.1.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jets3t-0.7.1.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jetty-6.1.14.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jetty-util-6.1.14.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/jsp-2.1-6.1.14.jar"/>
@@ -25,7 +29,7 @@
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/log4j-1.2.15.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/oro-2.0.8.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/servlet-api-2.5-6.1.14.jar"/>
-	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/slf4j-api-1.4.3.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/slf4j-api-1.5.8.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/slf4j-log4j12-1.4.3.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/common/xmlenc-0.52.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop-Core/test/ftplet-api-1.0.0.jar"/>

Modified: hadoop/common/branches/HADOOP-6194/.gitignore
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/.gitignore?rev=885142&r1=885141&r2=885142&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/.gitignore (original)
+++ hadoop/common/branches/HADOOP-6194/.gitignore Sat Nov 28 19:53:33 2009
@@ -19,6 +19,8 @@
 .settings
 .svn
 build/
+build-fi/
+build.properties
 conf/masters
 conf/slaves
 conf/hadoop-env.sh
@@ -30,6 +32,10 @@
 conf/capacity-scheduler.xml
 conf/mapred-queue-acls.xml
 docs/api/
+ivy/hadoop-core.xml
+ivy/hadoop-core-test.xml
+ivy/ivy-*.jar
+ivy/maven-ant-tasks-*.jar
 logs/
 src/contrib/ec2/bin/hadoop-ec2-env.sh
 src/docs/build

Modified: hadoop/common/branches/HADOOP-6194/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/CHANGES.txt?rev=885142&r1=885141&r2=885142&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/CHANGES.txt (original)
+++ hadoop/common/branches/HADOOP-6194/CHANGES.txt Sat Nov 28 19:53:33 2009
@@ -4,6 +4,54 @@
 
   INCOMPATIBLE CHANGES
 
+  NEW FEATURES
+
+    HADOOP-6284. Add a new parameter, HADOOP_JAVA_PLATFORM_OPTS, to
+    hadoop-config.sh so that it allows setting java command options for
+    JAVA_PLATFORM.  (Koji Noguchi via szetszwo)
+
+    HADOOP-6337. Updates the FilterInitializer class to be more visible,
+    and makes its init method take a Configuration argument.
+    (Jakob Homan via ddas)
+
+  IMPROVEMENTS
+
+    HADOOP-6283. Improve the exception messages thrown by
+    FileUtil$HardLink.getLinkCount(..).  (szetszwo)
+
+    HADOOP-6279. Add Runtime::maxMemory to JVM metrics. (Todd Lipcon via
+    cdouglas)
+
+    HADOOP-6305. Unify build property names to facilitate cross-project
+    modifications (cos)
+
+    HADOOP-6204. Implementing aspects development and fault injection
+    framework for Hadoop (cos)
+
+    HADOOP-6312. Remove unnecessary debug logging in Configuration constructor.
+    (Aaron Kimball via cdouglas)
+
+    HADOOP-6326. Hudson runs should check for AspectJ warnings and report
+    failure if any is present (cos)
+
+    HADOOP-6366. Reduce ivy console output to observable level (cos)
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+    HADOOP-6293. Fix FsShell -text to work on filesystems other than the
+    default. (cdouglas)
+
+    HADOOP-6341. Fix test-patch.sh for checkTests function. (gkesavan)
+
+    HADOOP-6314. Fix "fs -help" for the "-count" command.  (Ravi Phulari via
+    szetszwo)
+
+Release 0.21.0 - Unreleased
+
+  INCOMPATIBLE CHANGES
+
     HADOOP-4895. Remove deprecated methods DFSClient.getHints(..) and
     DFSClient.isDirectory(..).  (szetszwo)
 
@@ -78,6 +126,16 @@
     FileNotFoundException if the directory does not exist, rather than letting
     this be implementation-specific. (Jakob Homan via cdouglas)
 
+    HADOOP-6230. Moved process tree and memory calculator related classes
+    from Common to Map/Reduce. (Vinod Kumar Vavilapalli via yhemanth)
+
+    HADOOP-6203. FsShell rm/rmr error message indicates exceeding Trash quota
+    and suggests using -skipTrash when moving to trash fails.
+    (Boris Shkolnik via suresh)
+
+    HADOOP-6303. Eclipse .classpath template has outdated jar files and is
+    missing some new ones.  (cos)
+
   NEW FEATURES
 
     HADOOP-4268. Change fsck to use ClientProtocol methods so that the
@@ -162,6 +220,50 @@
     HADOOP-6120. Add support for Avro specific and reflect data.
     (sharad via cutting)
 
+    HADOOP-6226. Moves BoundedByteArrayOutputStream from the tfile package to
+    the io package and makes it available to other users (MAPREDUCE-318). 
+    (Jothi Padmanabhan via ddas)
+
+    HADOOP-6165. Add metadata to Serializations. (tomwhite)
+
+    HADOOP-6105. Adds support for automatically handling deprecation of
+    configuration keys. (V.V.Chaitanya Krishna via yhemanth)
+    
+    HADOOP-6235. Adds new method to FileSystem for clients to get server
+    defaults. (Kan Zhang via suresh)
+
+    HADOOP-6234. Add new option dfs.umaskmode to set umask in configuration
+    to use octal or symbolic instead of decimal. (Jakob Homan via suresh)
+
+    HADOOP-5073. Add annotation mechanism for interface classification.
+    (Jakob Homan via suresh)
+
+    HADOOP-4012. Provide splitting support for bzip2 compressed files. (Abdul
+    Qadeer via cdouglas)
+
+    HADOOP-6246. Add backward compatibility support to use deprecated decimal 
+    umask from old configuration. (Jakob Homan via suresh)
+
+    HADOOP-4952. Add new improved file system interface FileContext for the
+    application writer (Sanjay Radia via suresh)
+
+    HADOOP-6170. Add facility to tunnel Avro RPCs through Hadoop RPCs.
+    This permits one to take advantage of both Avro's RPC versioning
+    features and Hadoop's proven RPC scalability.  (cutting)
+
+    HADOOP-6267. Permit building contrib modules located in external
+    source trees.  (Todd Lipcon via cutting)
+
+    HADOOP-6240. Add new FileContext rename operation that is POSIX compliant
+    and allows overwriting the existing destination. (suresh)
+
+    HADOOP-6313. Implement Syncable interface in FSDataOutputStream to expose
+    flush APIs to application users. (Hairong Kuang via suresh)
+
+    HADOOP-6223. Add new file system interface AbstractFileSystem with
+    implementation of some file systems that delegate to old FileSystem.
+    (Sanjay Radia via suresh)
+
   IMPROVEMENTS
 
     HADOOP-4565. Added CombineFileInputFormat to use data locality information
@@ -421,8 +523,8 @@
     TestNameEditsConfig, TestStartup and TestStorageRestore.
     (Jakob Homan via shv)
 
-    HADOOP-5438. Provide a single FileSystem method to create or open-for-append
-    to a file.  (He Yongqiang via dhruba)
+    HADOOP-5438. Provide a single FileSystem method to create or 
+    open-for-append to a file.  (He Yongqiang via dhruba)
 
     HADOOP-5472. Change DistCp to support globbing of input paths.  (Dhruba
     Borthakur and Rodrigo Schmidt via szetszwo)
@@ -495,19 +597,68 @@
     HADOOP-6160. Fix releaseaudit target to run on specific directories.
     (gkesavan)
     
-    HADOOP-6169. Removing deprecated method calls in TFile. (hong tang via mahadev)
+    HADOOP-6169. Removing deprecated method calls in TFile. (hong tang via 
+    mahadev)
 
     HADOOP-6176. Add a couple package private methods to AccessTokenHandler
     for testing.  (Kan Zhang via szetszwo)
 
-    HADOOP-6182. Fix ReleaseAudit warnings (Giridharan Kesavan and Lee Tucker via gkesavan)
+    HADOOP-6182. Fix ReleaseAudit warnings (Giridharan Kesavan and Lee Tucker
+    via gkesavan)
 
     HADOOP-6173. Change src/native/packageNativeHadoop.sh to package all
     native library files.  (Hong Tang via szetszwo)
 
     HADOOP-6184. Provide an API to dump Configuration in a JSON format.
     (V.V.Chaitanya Krishna via yhemanth)
- 
+
+    HADOOP-6224. Add a method to WritableUtils performing a bounded read of an
+    encoded String. (Jothi Padmanabhan via cdouglas)
+
+    HADOOP-6133. Add a caching layer to Configuration::getClassByName to
+    alleviate a performance regression introduced in a compatibility layer.
+    (Todd Lipcon via cdouglas)
+
+    HADOOP-6252. Provide a method to determine if a deprecated key is set in
+    config file. (Jakob Homan via suresh)
+
+    HADOOP-5879. Read compression level and strategy from Configuration for
+    gzip compression. (He Yongqiang via cdouglas)
+
+    HADOOP-6216. Support comments in host files.  (Ravi Phulari and Dmytro
+    Molkov via szetszwo)
+
+    HADOOP-6217. Update documentation for project split. (Corinne Chandel via 
+    omalley)
+
+    HADOOP-6268. Add ivy jar to .gitignore. (Todd Lipcon via cdouglas)
+
+    HADOOP-6270. Support deleteOnExit in FileContext.  (Suresh Srinivas via
+    szetszwo)
+
+    HADOOP-6233. Rename configuration keys towards API standardization and
+    backward compatibility. (Jithendra Pandey via suresh)
+
+    HADOOP-6260. Add additional unit tests for FileContext util methods.
+    (Gary Murry via suresh).
+
+    HADOOP-6309. Change build.xml to run tests with java asserts.  (Eli
+    Collins via szetszwo)
+
+    HADOOP-6329. Add build-fi directory to the ignore lists.  (szetszwo)
+
+    HADOOP-5107. Use Maven ant tasks to publish the subproject jars.
+    (Giridharan Kesavan via omalley)
+
+    HADOOP-6343. Log unexpected throwable object caught in RPC.  (Jitendra Nath
+    Pandey via szetszwo)
+
+    HADOOP-6367. Removes Access Token implementation from common.
+    (Kan Zhang via ddas)
+
+    HADOOP-6395. Upgrade some libraries to be consistent across common, hdfs,
+    and mapreduce. (omalley)
+
   OPTIMIZATIONS
 
     HADOOP-5595. NameNode does not need to run a replicator to choose a
@@ -520,6 +671,15 @@
     HADOOP-6180. NameNode slowed down when many files with same filename
     were moved to Trash. (Boris Shkolnik via hairong)
 
+    HADOOP-6166. Further improve the performance of the pure-Java CRC32
+    implementation. (Tsz Wo (Nicholas), SZE via cdouglas)
+
+    HADOOP-6271. Add recursive and non recursive create and mkdir to 
+    FileContext. (Sanjay Radia via suresh)
+
+    HADOOP-6261. Add URI based tests for FileContext. 
+    (Ravi Phulari via suresh).
+
   BUG FIXES
     
     HADOOP-5379. CBZip2InputStream to throw IOException on data crc error.
@@ -603,8 +763,9 @@
 
     HADOOP-5292. Fix NPE in KFS::getBlockLocations. (Sriram Rao via lohit)
 
-    HADOOP-5219. Adds a new property io.seqfile.local.dir for use by SequenceFile,
-    which earlier used mapred.local.dir. (Sharad Agarwal via ddas)
+    HADOOP-5219. Adds a new property io.seqfile.local.dir for use by
+    SequenceFile, which earlier used mapred.local.dir. (Sharad Agarwal
+    via ddas)
 
     HADOOP-5300. Fix ant javadoc-dev target and the typo in the class name
     NameNodeActivtyMBean.  (szetszwo)
@@ -621,7 +782,7 @@
     HADOOP-5383. Avoid building an unused string in NameNode's 
     verifyReplication(). (Raghu Angadi)
 
-    HADOOP-5347. Create a job output directory for the bbp examples.  (szetszwo)
+    HADOOP-5347. Create a job output directory for the bbp examples. (szetszwo)
 
     HADOOP-5341. Make hadoop-daemon scripts backwards compatible with the
     changes in HADOOP-4868. (Sharad Agarwal via yhemanth)
@@ -727,8 +888,8 @@
     in the JobTracker to get the FileSystem objects as per the JobTracker's
     configuration. (Amar Kamat via ddas) 
 
-    HADOOP-5648. Not able to generate gridmix.jar on the already compiled version of hadoop.
-    (gkesavan)	
+    HADOOP-5648. Not able to generate gridmix.jar on the already compiled 
+    version of hadoop. (gkesavan)	
 
     HADOOP-5808. Fix import never used javac warnings in hdfs. (szetszwo)
 
@@ -801,10 +962,6 @@
     HADOOP-5809. Fix job submission, broken by errant directory creation.
     (Sreekanth Ramakrishnan and Jothi Padmanabhan via cdouglas)
 
-    HADOOP-5759. Fix for  IllegalArgumentException when 
-    CombineFileInputFormat is used as job InputFormat.
-    (Amareshwari Sriramadasu via dhruba)
-
     HADOOP-5635. Change distributed cache to work with other distributed file
     systems. (Andrew Hitchcock via tomwhite)
 
@@ -812,7 +969,8 @@
     in DataBlockScanner.  (Kan Zhang via szetszwo)
 
     HADOOP-4864. Fixes a problem to do with -libjars with multiple jars when
-    client and cluster reside on different OSs. (Amareshwari Sriramadasu via ddas)
+    client and cluster reside on different OSs. (Amareshwari Sriramadasu via 
+    ddas)
 
     HADOOP-5623. Fixes a problem to do with status messages getting overwritten
     in streaming jobs. (Rick Cox and Jothi Padmanabhan via ddas)
@@ -820,8 +978,8 @@
     HADOOP-5895. Fixes computation of count of merged bytes for logging.
     (Ravi Gummadi via ddas)
 
-    HADOOP-5805. problem using top level s3 buckets as input/output directories.
-    (Ian Nowland via tomwhite)
+    HADOOP-5805. problem using top level s3 buckets as input/output 
+    directories. (Ian Nowland via tomwhite)
    
     HADOOP-5940. trunk eclipse-plugin build fails while trying to copy 
     commons-cli jar from the lib dir (Giridharan Kesavan via gkesavan)
@@ -920,8 +1078,9 @@
     HADOOP-6123. Add missing classpaths in hadoop-config.sh.  (Sharad Agarwal
     via szetszwo)
 
-    HADOOP-6172. Fix jar file names in hadoop-config.sh and include ${build.src}
-    as a part of the source list in build.xml.  (Hong Tang via szetszwo)
+    HADOOP-6172. Fix jar file names in hadoop-config.sh and include 
+    ${build.src} as a part of the source list in build.xml.  (Hong Tang via 
+    szetszwo)
 
     HADOOP-6124. Fix javac warning detection in test-patch.sh.  (Giridharan
     Kesavan via szetszwo)
@@ -941,7 +1100,97 @@
     HADOOP-6152. Fix classpath variables in bin/hadoop-config.sh and some
     other scripts.  (Aaron Kimball via szetszwo)
 
-Release 0.20.1 - Unreleased
+    HADOOP-6215. Fix GenericOptionsParser to deal with -D with '=' in the 
+    value. (Amar Kamat via sharad)
+
+    HADOOP-6227. Fix Configuration to allow final parameters to be set to null
+    and prevent them from being overridden.
+    (Amareshwari Sriramadasu via yhemanth)
+
+    HADOOP-6199. Move io.map.skip.index property to core-default from mapred.
+    (Amareshwari Sriramadasu via cdouglas)
+
+    HADOOP-6229. Attempt to make a directory under an existing file on
+    LocalFileSystem should throw an Exception. (Boris Shkolnik via tomwhite)
+
+    HADOOP-6243. Fix a NullPointerException in processing deprecated keys.
+    (Sreekanth Ramakrishnan via yhemanth)
+
+    HADOOP-6009. S3N listStatus incorrectly returns null instead of empty
+    array when called on empty root. (Ian Nowland via tomwhite)
+
+    HADOOP-6181.  Fix .eclipse.templates/.classpath for avro and jets3t jar
+    files.  (Carlos Valiente via szetszwo)
+
+    HADOOP-6196. Fix a bug in SequenceFile.Reader where syncing within the
+    header would cause the reader to read the sync marker as a record. (Jay
+    Booth via cdouglas)
+
+    HADOOP-6250. Modify test-patch to delete copied XML files before running
+    patch build. (Rahul Kumar Singh via yhemanth)
+
+    HADOOP-6257. Two TestFileSystem classes are confusing
+    hadoop-hdfs-hdfwithmr. (Philip Zeyliger via tomwhite)
+
+    HADOOP-6151. Added an input filter to all of the http servlets that quotes
+    html characters in the parameters, to prevent cross-site scripting 
+    attacks. (omalley)
+
+    HADOOP-6274. Fix TestLocalFSFileContextMainOperations test failure.
+    (Gary Murry via suresh).
+
+    HADOOP-6281. Avoid null pointer exceptions when the jsps don't have 
+    parameters (omalley)
+
+    HADOOP-6285. Fix the result type of the getParameterMap method in the
+    HttpServer.QuotingInputFilter. (omalley)
+
+    HADOOP-6286. Fix bugs related to URI handling in glob methods in 
+    FileContext. (Boris Shkolnik via suresh)
+
+    HADOOP-6292. Update native libraries guide. (Corinne Chandel via cdouglas)
+
+    HADOOP-6327. FileContext tests should not use /tmp and should clean up
+    files.  (Sanjay Radia via szetszwo)
+
+    HADOOP-6318. Upgrade to Avro 1.2.0.  (cutting)
+
+    HADOOP-6334.  Fix GenericOptionsParser to understand URI for -files,
+    -libjars and -archives options and fix Path to support URI with fragment.
+    (Amareshwari Sriramadasu via szetszwo)
+
+    HADOOP-6344. Fix rm and rmr immediately deleting files rather than sending 
+    them to trash if a user is over quota. (Jakob Homan via suresh)
+
+    HADOOP-6347. run-test-core-fault-inject runs a test case twice if
+    -Dtestcase is set (cos)
+
+    HADOOP-6375. Sync documentation for FsShell du with its implementation.
+    (Todd Lipcon via cdouglas)
+
+Release 0.20.2 - Unreleased
+
+  NEW FEATURES
+
+    HADOOP-6218. Adds a feature where TFile can be split by Record
+    Sequence number. (Hong Tang and Raghu Angadi via ddas)
+
+  BUG FIXES
+
+    HADOOP-6231. Allow caching of filesystem instances to be disabled on a
+    per-instance basis. (tomwhite)
+
+    HADOOP-5759. Fix for IllegalArgumentException when CombineFileInputFormat
+    is used as job InputFormat. (Amareshwari Sriramadasu via dhruba)
+
+    HADOOP-6097. Fix Path conversion in makeQualified and reset LineReader byte
+    count at the start of each block in Hadoop archives. (Ben Slusky, Tom
+    White, and Mahadev Konar via cdouglas)
+
+    HADOOP-6269. Fix threading issue with defaultResource in Configuration.
+    (Sreekanth Ramakrishnan via cdouglas)
+
+Release 0.20.1 - 2009-09-01
 
   INCOMPATIBLE CHANGES
 

Propchange: hadoop/common/branches/HADOOP-6194/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Nov 28 19:53:33 2009
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/CHANGES.txt:804966-807681
+/hadoop/common/trunk/CHANGES.txt:804966-884903
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278
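
Several of the CHANGES.txt entries above (HADOOP-4952, HADOOP-6240, HADOOP-6270, HADOOP-6271) concern the new FileContext API pulled in by this resync (src/java/org/apache/hadoop/fs/FileContext.java in the Added list). A rough usage sketch against the trunk API of the time follows; the path, permissions, and data written are made up, and exact method signatures may differ from what this revision ships:

import java.io.IOException;
import java.util.EnumSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class FileContextSketch {
  public static void main(String[] args) throws IOException {
    // Bind a FileContext to the default file system named in the configuration.
    FileContext fc = FileContext.getFileContext(new Configuration());

    // Recursive mkdir (HADOOP-6271); path and permissions are illustrative only.
    Path dir = new Path("/tmp/filecontext-demo");
    fc.mkdir(dir, FsPermission.getDefault(), true);

    // Create a file, overwriting any existing one; CreateOpts.createParent()
    // asks for missing parent directories to be created as well.
    Path file = new Path(dir, "example.txt");
    FSDataOutputStream out = fc.create(file,
        EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
        CreateOpts.createParent());
    out.writeBytes("hello from FileContext\n");
    out.close();

    // Register the file for deletion when the JVM exits (HADOOP-6270).
    fc.deleteOnExit(file);
  }
}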

Modified: hadoop/common/branches/HADOOP-6194/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/bin/hadoop-config.sh?rev=885142&r1=885141&r2=885142&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/bin/hadoop-config.sh (original)
+++ hadoop/common/branches/HADOOP-6194/bin/hadoop-config.sh Sat Nov 28 19:53:33 2009
@@ -183,7 +183,7 @@
 # setup 'java.library.path' for native-hadoop code if necessary
 JAVA_LIBRARY_PATH=''
 if [ -d "${HADOOP_CORE_HOME}/build/native" -o -d "${HADOOP_CORE_HOME}/lib/native" ]; then
-  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
+  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m ${HADOOP_JAVA_PLATFORM_OPTS} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
   
   if [ -d "$HADOOP_CORE_HOME/build/native" ]; then
     JAVA_LIBRARY_PATH=${HADOOP_CORE_HOME}/build/native/${JAVA_PLATFORM}/lib

Modified: hadoop/common/branches/HADOOP-6194/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/build.xml?rev=885142&r1=885141&r2=885142&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/build.xml (original)
+++ hadoop/common/branches/HADOOP-6194/build.xml Sat Nov 28 19:53:33 2009
@@ -18,7 +18,8 @@
 -->
 
 <project name="hadoop-common" default="compile"
-   xmlns:ivy="antlib:org.apache.ivy.ant"> 
+   xmlns:ivy="antlib:org.apache.ivy.ant"
+   xmlns:artifact="urn:maven-artifact-ant"> 
 
   <!-- Load all the default properties, and any the user wants    -->
   <!-- to contribute (without having to type -D or edit this file -->
@@ -27,13 +28,13 @@
  
   <property name="Name" value="Hadoop-common"/>
   <property name="name" value="hadoop-common"/>
-  <property name="version" value="0.21.0-dev"/>
+  <property name="version" value="0.22.0-SNAPSHOT"/>
   <property name="final.name" value="${name}-${version}"/>
   <property name="test.final.name" value="${name}-test-${version}"/>
   <property name="year" value="2009"/>
 
   <property name="src.dir" value="${basedir}/src"/>  	
-  <property name="core.src.dir" value="${src.dir}/java"/>
+  <property name="java.src.dir" value="${src.dir}/java"/>
   <property name="native.src.dir" value="${basedir}/src/native"/>
 
   <property name="lib.dir" value="${basedir}/lib"/>
@@ -111,7 +112,7 @@
 
   <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
   <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
-  <property name="jdiff.stable" value="0.20.0"/>
+  <property name="jdiff.stable" value="0.20.1"/>
   <property name="jdiff.stable.javadoc" 
             value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
 
@@ -123,10 +124,16 @@
 
 	
  <!-- IVY properties set here -->
+  <property name="ivy.repo.dir" value="${user.home}/ivyrepo" />
   <property name="ivy.dir" location="ivy" />
   <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
+  <property name="asfrepo" value="https://repository.apache.org/content/repositories/snapshots"/>
+  <property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
   <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
-  <property name="ivy_repo_url" value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
+  <property name="ant_task.jar" location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
+  <property name="ant_task_repo_url" 
+     value="${mvnrepo}/org/apache/maven/maven-ant-tasks/${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
+  <property name="ivy_repo_url" value="${mvnrepo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
   <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
   <property name="ivy.org" value="org.apache.hadoop"/>
   <property name="build.dir" location="build" />
@@ -134,17 +141,20 @@
   <property name="build.ivy.dir" location="${build.dir}/ivy" />
   <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
   <property name="common.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
-  <property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
-  <property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven" />
-  <property name="build.ivy.maven.pom" location="${build.ivy.maven.dir}/hadoop-core-${version}.pom" />
-  <property name="build.ivy.maven.jar" location="${build.ivy.maven.dir}/hadoop-core-${version}.jar" />
+  <property name="build.ivy.report.dir" location="${build.ivy.dir}/report"/>
+  <property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven"/>
+  <property name="pom.xml" location="${build.ivy.maven.dir}/pom.xml"/>
+  <property name="hadoop-core.pom" location="${ivy.dir}/hadoop-core.xml"/>
+  <property name="build.ivy.maven.core.jar" location="${build.ivy.maven.dir}/hadoop-core-${version}.jar"/>
+  <property name="hadoop-core-test.pom" location="${ivy.dir}/hadoop-core-test.xml" />
+  <property name="build.ivy.maven.core-test.jar" location="${build.ivy.maven.dir}/hadoop-core-test-${version}.jar"/>
 
   <!--this is the naming policy for artifacts we want pulled down-->
   <property name="ivy.module" location="hadoop-core" />
   <property name="ivy.artifact.retrieve.pattern" value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
 
   <!--this is how artifacts that get built are named-->
-  <property name="ivy.publish.pattern" value="hadoop-common-[revision].[ext]"/>
+  <property name="ivy.publish.pattern" value="[artifact]-[revision].[ext]"/>
   <property name="hadoop-core.jar" location="${build.dir}/${final.name}.jar" />
   <property name="hadoop-core-test.jar" location="${build.dir}/${test.final.name}.jar" />
 
@@ -171,7 +181,7 @@
     <path refid="ivy-common.classpath"/>
   </path>
 
-  <path id="test.core.classpath">
+  <path id="test.classpath">
     <pathelement location="${test.build.extraconf}"/>
     <pathelement location="${test.core.build.classes}" />
     <pathelement location="${test.src.dir}"/>
@@ -185,7 +195,7 @@
 <!--
   <path id="test.hdfs.classpath">
     <pathelement location="${test.hdfs.build.classes}" />
-    <path refid="test.core.classpath"/>
+    <path refid="test.classpath"/>
   </path>
 
   <path id="test.mapred.classpath">
@@ -252,7 +262,7 @@
     </copy>
 
     <exec executable="sh">
-       <arg line="src/saveVersion.sh ${version}"/>
+       <arg line="src/saveVersion.sh ${version} ${build.dir}"/>
     </exec>
 	
    <exec executable="sh">
@@ -260,20 +270,22 @@
    </exec>
   </target>
 
+  <import file="${test.src.dir}/aop/build/aop.xml"/>
+
   <!-- ====================================================== -->
   <!-- Compile the Java files                                 -->
   <!-- ====================================================== -->
   <target name="record-parser" depends="init" if="javacc.home">
       <javacc
-          target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
-          outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
+          target="${java.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
+          outputdirectory="${java.src.dir}/org/apache/hadoop/record/compiler/generated"
           javacchome="${javacc.home}" />
   </target>
   
   <target name="compile-rcc-compiler" depends="init, record-parser">
     <javac 
         encoding="${build.encoding}" 
-        srcdir="${core.src.dir}"
+        srcdir="${java.src.dir}"
         includes="org/apache/hadoop/record/compiler/**/*.java"
         destdir="${build.classes}"
         debug="${javac.debug}"
@@ -294,7 +306,7 @@
     <!-- Compile Java files (excluding JSPs) checking warnings -->
     <javac 
      encoding="${build.encoding}" 
-     srcdir="${core.src.dir};${build.src}"	
+     srcdir="${java.src.dir};${build.src}"	
      includes="org/apache/hadoop/**/*.java"
      destdir="${build.classes}"
      debug="${javac.debug}"
@@ -307,8 +319,8 @@
     </javac>
 
     <copy todir="${build.classes}">
-      <fileset dir="${core.src.dir}" includes="**/*.properties"/>
-      <fileset dir="${core.src.dir}" includes="core-default.xml"/>
+      <fileset dir="${java.src.dir}" includes="**/*.properties"/>
+      <fileset dir="${java.src.dir}" includes="core-default.xml"/>
     </copy>
      
   </target>
@@ -363,7 +375,7 @@
   </target>
 
   <target name="compile-contrib" depends="compile-core">
-     <subant target="compile">
+     <subant target="compile" inheritall="true">
         <property name="version" value="${version}"/>
         <fileset file="${contrib.dir}/build.xml"/>
      </subant>  	
@@ -381,7 +393,8 @@
     <tar compression="gzip" destfile="${build.classes}/bin.tgz">
       <tarfileset dir="bin" mode="755"/>
     </tar>
-    <jar jarfile="${hadoop-core.jar}"
+    <property name="jar.properties.list" value="commons-logging.properties, log4j.properties, hadoop-metrics.properties" />
+    <jar jarfile="${build.dir}/${final.name}.jar"
          basedir="${build.classes}">
       <manifest>
         <section name="org/apache/hadoop">
@@ -390,13 +403,11 @@
           <attribute name="Implementation-Vendor" value="Apache"/>
         </section>
       </manifest>
-      <fileset file="${conf.dir}/commons-logging.properties"/>
-      <fileset file="${conf.dir}/log4j.properties"/>
-      <fileset file="${conf.dir}/hadoop-metrics.properties"/>
+      <fileset dir="${conf.dir}" includes="${jar.properties.list}" />
+      <fileset file="${jar.extra.properties.list}" />
     </jar>
   </target>
 
-
   <!-- ================================================================== -->
   <!-- Make the Hadoop metrics jar. (for use outside Hadoop)              -->
   <!-- ================================================================== -->
@@ -419,7 +430,7 @@
 
   <target name="generate-avro-records" depends="init, ivy-retrieve-test">
     <taskdef name="schema" classname="org.apache.avro.specific.SchemaTask">
-      <classpath refid="test.core.classpath"/>
+      <classpath refid="test.classpath"/>
     </taskdef>
     <schema destdir="${test.generated.dir}">
       <fileset dir="${test.src.dir}">
@@ -431,6 +442,10 @@
   <!-- ================================================================== -->
   <!-- Compile test code                                                  --> 
   <!-- ================================================================== -->
+  <!-- This is a wrapper for fault-injection needs-->
+  <target name="-classes-compilation"
+    depends="compile-core-classes, compile-core-test"/> 
+
   <target name="compile-core-test" depends="compile-core-classes, ivy-retrieve-test, generate-test-records, generate-avro-records">
     <mkdir dir="${test.core.build.classes}"/>
     <javac 
@@ -444,7 +459,7 @@
      source="${javac.version}"
      deprecation="${javac.deprecation}">
       <compilerarg line="${javac.args}"/>
-      <classpath refid="test.core.classpath"/>
+      <classpath refid="test.classpath"/>
     </javac>
     <javac 
      encoding="${build.encoding}" 
@@ -457,9 +472,17 @@
      source="${javac.version}"
      deprecation="${javac.deprecation}">
       <compilerarg line="${javac.args} ${javac.args.warnings}" />
-      <classpath refid="test.core.classpath"/>
+      <classpath refid="test.classpath"/>
      </javac>
 
+    <taskdef
+       name="paranamer" 
+       classname="com.thoughtworks.paranamer.ant.ParanamerGeneratorTask">
+      <classpath refid="classpath" />
+    </taskdef>
+    <paranamer sourceDirectory="${test.src.dir}/core"
+	       outputDirectory="${test.core.build.classes}"/>
+
     <delete dir="${test.cache.data}"/>
     <mkdir dir="${test.cache.data}"/>
     <copy file="${test.src.dir}/core/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/>
@@ -490,6 +513,79 @@
   </target>
 
   <!-- ================================================================== -->
+  <!-- Fault injection customization section.
+       These targets ought to be copied over to other projects and modified
+       as needed -->
+  <!-- ================================================================== -->
+  <target name="run-test-core-fault-inject" depends="injectfaults" 
+	  description="Run full set of the unit tests with fault injection">
+    <macro-run-tests-fault-inject target.name="run-test-core"
+      testcasesonly="false"/>
+  </target>
+
+  <target name="jar-test-fault-inject" depends="injectfaults" 
+    description="Make hadoop-test-fi.jar">
+    <macro-jar-test-fault-inject
+      target.name="jar-test"
+      jar.final.name="test.final.name"
+      jar.final.value="${test.final.name}-fi" />
+  </target>
+
+  <target name="jar-fault-inject" depends="injectfaults" 
+    description="Make hadoop-fi.jar">
+    <macro-jar-fault-inject
+      target.name="jar"
+      jar.final.name="final.name"
+      jar.final.value="${final.name}-fi" />
+  </target>
+
+  <!--This target is not included in the top-level list of targets because
+  it serves a special "regression" purpose: running non-FI tests in the
+  FI environment -->
+  <target name="run-fault-inject-with-testcaseonly" depends="injectfaults">
+    <fail unless="testcase">Can't run this target without setting -Dtestcase!
+    </fail>
+    <macro-run-tests-fault-inject target.name="run-test-core" 
+      testcasesonly="true"/>
+  </target>
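+  <!--Illustrative invocations of the targets above (TestFoo is a placeholder
+      test class name, not a real test):
+        ant jar-fault-inject                  build hadoop-fi.jar
+        ant jar-test-fault-inject             build hadoop-test-fi.jar
+        ant run-test-core-fault-inject        run the full FI test suite
+        ant run-fault-inject-with-testcaseonly -Dtestcase=TestFoo
+  -->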
+  <!-- ================================================================== -->
+  <!-- End of Fault injection customization section                       -->
+  <!-- ================================================================== -->
+
+  <condition property="tests.notestcase">
+    <and>
+      <isfalse value="${test.fault.inject}"/>
+      <not>
+        <isset property="testcase"/>
+      </not>
+    </and>
+  </condition>
+  <condition property="tests.notestcase.fi">
+    <and>
+      <not>
+        <isset property="testcase" />
+      </not>
+      <istrue value="${test.fault.inject}" />
+    </and>
+  </condition>
+  <condition property="tests.testcase">
+    <and>
+      <isfalse value="${test.fault.inject}" />
+      <isset property="testcase" />
+    </and>
+  </condition>
+  <condition property="tests.testcaseonly">
+    <istrue value="${special.fi.testcasesonly}" />
+  </condition>
+  <condition property="tests.testcase.fi">
+    <and>
+      <istrue value="${test.fault.inject}" />
+      <isset property="testcase" />
+      <isfalse value="${special.fi.testcasesonly}" />
+    </and>
+  </condition>
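+  <!--A rough guide to the conditions above (test.fault.inject and
+      special.fi.testcasesonly are presumably set by the imported aop.xml
+      macros):
+        tests.notestcase    : no -Dtestcase, FI off : all tests under ${test.src.dir}/core
+        tests.notestcase.fi : no -Dtestcase, FI on  : all tests under ${test.src.dir}/aop
+        tests.testcase      : -Dtestcase set, FI off: that one core test
+        tests.testcase.fi   : -Dtestcase set, FI on : that one aop test
+        tests.testcaseonly  : testcasesonly mode    : the named core test, run
+                              against the FI environment
+  -->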
+	     
+  <!-- ================================================================== -->
   <!-- Run unit tests                                                     --> 
   <!-- ================================================================== -->
   <target name="run-test-core" depends="compile-core-test" description="Run core unit tests">
@@ -500,6 +596,8 @@
     <mkdir dir="${test.log.dir}"/>
   	<copy file="${test.src.dir}/hadoop-policy.xml" 
   	  todir="${test.build.extraconf}" />
+    <copy file="${test.src.dir}/fi-site.xml"
+      todir="${test.build.extraconf}" />
     <junit showoutput="${test.output}"
       printsummary="${test.junit.printsummary}"
       haltonfailure="${test.junit.haltonfailure}"
@@ -508,6 +606,7 @@
       maxmemory="${test.junit.maxmemory}"
       dir="${basedir}" timeout="${test.timeout}"
       errorProperty="tests.failed" failureProperty="tests.failed">
+      <jvmarg value="-ea" />
       <sysproperty key="test.build.data" value="${test.build.data}"/>
       <sysproperty key="test.cache.data" value="${test.cache.data}"/>    	
       <sysproperty key="test.debug.data" value="${test.debug.data}"/>
@@ -526,14 +625,30 @@
       <syspropertyset dynamic="no">
          <propertyref name="compile.c++"/>
       </syspropertyset>
-      <classpath refid="test.core.classpath"/>
+      <classpath refid="test.classpath"/>
+      <syspropertyset id="FaultProbabilityProperties">
+        <propertyref regex="fi.*"/>
+      </syspropertyset>
       <formatter type="${test.junit.output.format}" />
-      <batchtest todir="${test.build.dir}" unless="testcase">
+      <batchtest todir="${test.build.dir}" if="tests.notestcase">
         <fileset dir="${test.src.dir}/core"
 	         includes="**/${test.include}.java"
 		 excludes="**/${test.exclude}.java" />
       </batchtest>
-      <batchtest todir="${test.build.dir}" if="testcase">
+      <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
+        <fileset dir="${test.src.dir}/aop"
+          includes="**/${test.include}.java"
+          excludes="**/${test.exclude}.java" />
+      </batchtest>
+      <batchtest todir="${test.build.dir}" if="tests.testcase">
+        <fileset dir="${test.src.dir}/core" includes="**/${testcase}.java"/>
+      </batchtest>
+      <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
+        <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
+      </batchtest>
+      <!--The following batch is for the special case where non-FI tests
+      need to be run against the FI environment -->
+      <batchtest todir="${test.build.dir}" if="tests.testcaseonly">
         <fileset dir="${test.src.dir}/core" includes="**/${testcase}.java"/>
       </batchtest>
     </junit>
@@ -546,7 +661,7 @@
   </target>
 
   <target name="test-contrib" depends="compile, compile-core-test" description="Run contrib unit tests">
-    <subant target="test">
+    <subant target="test" inheritall="true">
        <property name="version" value="${version}"/>
        <property name="clover.jar" value="${clover.jar}"/>
        <fileset file="${contrib.dir}/build.xml"/>
@@ -557,12 +672,13 @@
     <delete file="${test.build.dir}/testsfailed"/> 
     <property name="continueOnFailure" value="true"/> 
     <antcall target="run-test-core"/>
+    <antcall target="run-test-core-fault-inject"/>
     <available file="${test.build.dir}/testsfailed" property="testsfailed"/>
     <fail if="testsfailed">Tests failed!</fail> 
   </target>
 
   <target name="test" depends="jar-test,test-core" description="Run all unit tests">
-    <subant target="test-contrib">	 
+    <subant target="test-contrib" inheritall="true">	 
       <fileset dir="." includes="build.xml"/>
     </subant>
   </target>
@@ -581,7 +697,8 @@
   <!-- ================================================================== -->
   <!-- Run optional third-party tool targets                              --> 
   <!-- ================================================================== -->
-  <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" description="Run optional third-party tool targets">
+  <target name="checkstyle" depends="ivy-retrieve-checkstyle,check-for-checkstyle" if="checkstyle.present" 
+       description="Run optional third-party tool targets">
        <taskdef resource="checkstyletask.properties">
          <classpath refid="checkstyle-classpath"/>
        </taskdef>
@@ -590,7 +707,7 @@
   	
   	<checkstyle config="${test.src.dir}/checkstyle.xml"
   		failOnViolation="false">
-      <fileset dir="${core.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
+      <fileset dir="${java.src.dir}" includes="**/*.java" excludes="**/generated/**"/>
       <formatter type="xml" toFile="${test.build.dir}/checkstyle-errors.xml"/>
   	</checkstyle>
   	
@@ -630,7 +747,7 @@
           <include name="**/*.jar"/>
         </fileset>
       </auxClasspath>
-      <sourcePath path="${core.src.dir}"/>
+      <sourcePath path="${java.src.dir}"/>
       <class location="${basedir}/build/${final.name}.jar" />
     </findbugs>
 
@@ -649,7 +766,8 @@
   <!-- Documentation                                                      -->
   <!-- ================================================================== -->
   
-  <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." if="forrest.home">
+  <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. 
+       To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." if="forrest.home">
     <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest"
 	  failonerror="true">
       <env key="JAVA_HOME" value="${java5.home}"/>
@@ -658,14 +776,14 @@
       <fileset dir="${docs.src}/build/site/" />
     </copy>
     <copy file="${docs.src}/releasenotes.html" todir="${build.docs}"/>
-    <style basedir="${core.src.dir}" destdir="${build.docs}"
+    <style basedir="${java.src.dir}" destdir="${build.docs}"
            includes="core-default.xml" style="conf/configuration.xsl"/>
     <antcall target="changes-to-html"/>
     <antcall target="cn-docs"/>
   </target>
 
-  <target name="cn-docs" depends="forrest.check, init" 
-       description="Generate forrest-based Chinese documentation. To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." 
+  <target name="cn-docs" depends="forrest.check, init" description="Generate forrest-based Chinese documentation. 
+        To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." 
         if="forrest.home">
     <exec dir="${src.docs.cn}" executable="${forrest.home}/bin/forrest" failonerror="true">
       <env key="LANG" value="en_US.utf8"/>
@@ -674,23 +792,25 @@
     <copy todir="${build.docs.cn}">
       <fileset dir="${src.docs.cn}/build/site/" />
     </copy>
-    <style basedir="${core.src.dir}" destdir="${build.docs.cn}"
+    <style basedir="${java.src.dir}" destdir="${build.docs.cn}"
            includes="core-default.xml" style="conf/configuration.xsl"/>
     <antcall target="changes-to-html"/>
   </target>
 
   <target name="forrest.check" unless="forrest.home" depends="java5.check">
-    <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=&lt;base of Apache Forrest installation&gt; to Ant on the command-line." />
+    <fail message="'forrest.home' is not defined. Please pass 
+      -Dforrest.home=&lt;base of Apache Forrest installation&gt; to Ant on the command-line." />
   </target>
 
   <target name="java5.check" unless="java5.home">
-    <fail message="'java5.home' is not defined.  Forrest requires Java 5.  Please pass -Djava5.home=&lt;base of Java 5 distribution&gt; to Ant on the command-line." />
+    <fail message="'java5.home' is not defined.  Forrest requires Java 5.  
+       Please pass -Djava5.home=&lt;base of Java 5 distribution&gt; to Ant on the command-line." />
   </target>
 	
   <target name="javadoc-dev" depends="compile, ivy-retrieve-javadoc" description="Generate javadoc for hadoop developers">
     <mkdir dir="${build.javadoc.dev}"/>
     <javadoc
-      overview="${core.src.dir}/overview.html"
+      overview="${java.src.dir}/overview.html"
       packagenames="org.apache.hadoop.*"
       destdir="${build.javadoc.dev}"
       author="true"
@@ -699,9 +819,8 @@
       windowtitle="${Name} ${version} API"
       doctitle="${Name} ${version} Developer API"
       bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
-      maxmemory="${javadoc.maxmemory}"
-      >
-        <packageset dir="${core.src.dir}"/>
+      maxmemory="${javadoc.maxmemory}">
+        <packageset dir="${java.src.dir}"/>
 	<packageset dir="src/contrib/failmon/src/java/"/> 
 
         <link href="${javadoc.link.java}"/>
@@ -736,7 +855,7 @@
        unless="javadoc.is.uptodate">
     <mkdir dir="${build.javadoc}"/>
     <javadoc
-      overview="${core.src.dir}/overview.html"
+      overview="${java.src.dir}/overview.html"
       packagenames="org.apache.hadoop.*"
       destdir="${build.javadoc}"
       author="true"
@@ -745,9 +864,8 @@
       windowtitle="${Name} ${version} API"
       doctitle="${Name} ${version} API"
       bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
-      maxmemory="${javadoc.maxmemory}"
-      >
-        <packageset dir="${core.src.dir}"/>
+      maxmemory="${javadoc.maxmemory}">
+        <packageset dir="${java.src.dir}"/>
 	<packageset dir="src/contrib/failmon/src/java/"/> 
 	
         <link href="${javadoc.link.java}"/>
@@ -855,7 +973,7 @@
 	  <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
     </exec>
 
-    <subant target="package">
+    <subant target="package" inheritall="true">
       <!--Pass down the version in case its needed again and the target
       distribution directory so contribs know where to install to.-->
       <property name="version" value="${version}"/>
@@ -955,7 +1073,7 @@
 	  <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
     </exec>
 
-    <subant target="package">
+    <subant target="package" inheritall="true">
       <!--Pass down the version in case its needed again and the target
       distribution directory so contribs know where to install to.-->
       <property name="version" value="${version}"/>
@@ -1009,6 +1127,60 @@
       </param.listofitems>
     </macro_tar>
   </target>
+  
+  <target name="ant-task-download" description="To download mvn-ant-task" unless="offline">
+    <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
+  </target>
+
+  <target name="mvn-taskdef" depends="ant-task-download">
+     <path id="mvn-ant-task.classpath" path="${ant_task.jar}"/> 
+     <typedef resource="org/apache/maven/artifact/ant/antlib.xml" 
+         uri="urn:maven-artifact-ant"
+         classpathref="mvn-ant-task.classpath"/>
+  </target>   
+
+  <target name="mvn-install" depends="mvn-taskdef,jar,jar-test,set-version" 
+     description="To install the hadoop core and test jars into the local m2 cache">
+     <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
+     <artifact:pom file="${hadoop-core-test.pom}" id="hadoop.core.test"/>
+     <artifact:install file="${hadoop-core.jar}">
+        <pom refid="hadoop.core"/>
+     </artifact:install>
+     <artifact:install file="${hadoop-core-test.jar}">
+        <pom refid="hadoop.core.test"/>
+     </artifact:install>
+  </target>
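+  <!--A typical invocation (sketch; the version shown is purely illustrative):
+        ant mvn-install -Dversion=0.22.0-SNAPSHOT
+      installs hadoop-core-0.22.0-SNAPSHOT.jar and the matching test jar, with
+      the poms produced by set-version, into the local Maven repository. -->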
+
+  <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-test, set-version"
+     description="To deploy the hadoop core and test jars to the Apache snapshots repository">
+     <artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
+     <artifact:pom file="${hadoop-core-test.pom}" id="hadoop.core.test"/>
+
+     <artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
+     <artifact:deploy file="${hadoop-core.jar}">
+         <remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
+         <pom refid="hadoop.core"/>
+     </artifact:deploy>
+     <artifact:deploy file="${hadoop-core-test.jar}">
+         <remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
+         <pom refid="hadoop.core.test"/>
+     </artifact:deploy>
+  </target>
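+  <!--Note: the deploy step presumably picks up credentials for the
+      "apache.snapshots.https" server id from the user's ~/.m2/settings.xml,
+      and ${asfrepo} is expected to point at the Apache snapshots repository
+      URL defined elsewhere in the build. -->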
+  
+  <target name="set-version">
+    <delete file="${basedir}/ivy/hadoop-core.xml"/>
+    <delete file="${basedir}/ivy/hadoop-core-test.xml"/>
+    <copy file="${basedir}/ivy/hadoop-core-template.xml" tofile="${basedir}/ivy/hadoop-core.xml"/>
+    <copy file="${basedir}/ivy/hadoop-core-test-template.xml" tofile="${basedir}/ivy/hadoop-core-test.xml"/>
+    <replaceregexp byline="true">
+      <regexp pattern="@version"/>
+      <substitution expression="${version}"/>
+      <fileset dir="${basedir}/ivy">
+        <include name="hadoop-core.xml"/>
+        <include name="hadoop-core-test.xml"/>
+      </fileset>
+    </replaceregexp>
+  </target>
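+  <!--set-version rewrites the @version token in the copied pom templates;
+      assuming a template line such as <version>@version</version>, running
+      with -Dversion=0.22.0-SNAPSHOT (an illustrative value) would yield
+      <version>0.22.0-SNAPSHOT</version> in ivy/hadoop-core.xml. -->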
 
   <!-- ================================================================== -->
   <!-- Perform audit activities for the release                           -->
@@ -1036,23 +1208,31 @@
   <!-- ================================================================== -->
   <!-- Clean.  Delete the build files, and their directories              -->
   <!-- ================================================================== -->
-  <target name="clean" depends="clean-contrib" description="Clean.  Delete the build files, and their directories">
+  <target name="clean" depends="clean-contrib, clean-fi" description="Clean.  Delete the build files, and their directories">
     <delete dir="${build.dir}"/>
+    <delete file="${basedir}/ivy/hadoop-core.xml"/>
+    <delete file="${basedir}/ivy/hadoop-core-test.xml"/>
     <delete dir="${docs.src}/build"/>
     <delete dir="${src.docs.cn}/build"/>
   </target>
 
+  <target name="veryclean" depends="clean" description="Delete the mvn ant task jar and the ivy ant task jar">
+    <delete file="${ant_task.jar}"/>
+    <delete file="${ivy.jar}"/>
+  </target>
+
   <!-- ================================================================== -->
   <!-- Clean contrib target. For now, must be called explicitly           -->
   <!-- Using subant instead of ant as a workaround for 30569              -->
   <!-- ================================================================== -->
   <target name="clean-contrib">
-     <subant target="clean">        
+     <subant target="clean" inheritall="true">        
         <fileset file="src/contrib/build.xml"/>
      </subant>  	
   </target>
 	
- <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover.  To use, specify -Dclover.home=&lt;base of clover installation&gt; -Drun.clover=true on the command line."/>
+ <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. 
+     To use, specify -Dclover.home=&lt;base of clover installation&gt; -Drun.clover=true on the command line."/>
 
 <target name="clover.setup" if="clover.enabled">
    <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
@@ -1196,99 +1376,106 @@
     </fail>
   </target>
 
+  <property name="ivyresolvelog" value="download-only"/>
+  <property name="ivyretrievelog" value="quiet"/>
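+  <!--Ivy output is kept quiet by default; to see the normal resolve/retrieve
+      logging, override these properties on the command line, e.g.
+        ant ivy-retrieve -Divyresolvelog=default -Divyretrievelog=default -->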
 
   <target name="ivy-init" depends="ivy-init-antlib" >
 
     <!--Configure Ivy by reading in the settings file
         If anyone has already read in a settings file into this settings ID, it gets priority
     -->
-    <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'/>
+    <ivy:configure settingsid="${ant.project.name}.ivy.settings" file="${ivysettings.xml}" override='false'
+      realm="Sonatype Nexus Repository Manager"/>
+
   </target>
 
   <target name="ivy-resolve" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"/>
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"
+    	log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-javadoc" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"/>
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"
+    	log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-releaseaudit" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"/>
-  </target>
-
-  <target name="ivy-resolve-test-hdfswithmr" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test-hdfswithmr" />
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"
+  		log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-test" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test" />
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test"
+    	log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-common" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" />
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common"
+    	log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-jdiff" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff" />
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff"
+    	log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-checkstyle" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"/>
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"
+  		log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-retrieve" depends="ivy-resolve"
     description="Retrieve Ivy-managed artifacts">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+    		log="${ivyretrievelog}"/>
   </target>
 
   <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle"
     description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+  			log="${ivyretrievelog}"/>
     <ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/>
   </target>
 
   <target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff"
-    description="Retrieve Ivy-managed artifacts for the javadoc configurations">
+    description="Retrieve Ivy-managed artifacts for the jdiff configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+  			log="${ivyretrievelog}"/>
     <ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/>
   </target>
 
   <target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc"
     description="Retrieve Ivy-managed artifacts for the javadoc configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+  			log="${ivyretrievelog}"/>
     <ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/>
   </target>
 
-  <target name="ivy-retrieve-test-hdfswithmr" depends="ivy-resolve-test-hdfswithmr"
-    description="Retrieve Ivy-managed artifacts for the test configurations">
-    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
-    <ivy:cachepath pathid="ivy-test.classpath" conf="test-hdfswithmr"/>
-  </target>
-
   <target name="ivy-retrieve-test" depends="ivy-resolve-test"
     description="Retrieve Ivy-managed artifacts for the test configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+    		log="${ivyretrievelog}"/>
     <ivy:cachepath pathid="ivy-test.classpath" conf="test"/>
   </target>
 
   <target name="ivy-retrieve-common" depends="ivy-resolve-common"
     description="Retrieve Ivy-managed artifacts for the compile configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+    		log="${ivyretrievelog}"/>
     <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
   </target>
 
   <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
     description="Retrieve Ivy-managed artifacts for the compile configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" />
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+    		log="${ivyretrievelog}"/>
     <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
   </target>
 
@@ -1300,22 +1487,9 @@
     </echo>
   </target>
 
-  <target name="assert-hadoop-jar-exists" depends="ivy-init">
-    <fail>
-      <condition >
-        <not>
-          <available file="${hadoop-core.jar}" />
-        </not>
-      </condition>
-      Not found: ${hadoop-core.jar}
-      Please run the target "jar" in the main build file
-    </fail>
-
-  </target>
-
-  <target name="ready-to-publish" depends="jar,assert-hadoop-jar-exists,ivy-resolve"/>
+  <target name="ready-to-publish" depends="jar,ivy-resolve"/>
 
-  <target name="ivy-publish-local" depends="ready-to-publish,ivy-resolve">
+  <target name="ivy-publish-local" depends="ready-to-publish">
     <ivy:publish
       settingsRef="${ant.project.name}.ivy.settings"
       resolver="local"
@@ -1323,45 +1497,4 @@
       overwrite="true"
       artifactspattern="${build.dir}/${ivy.publish.pattern}" />
   </target>
-
-
-  <!-- this is here for curiosity, to see how well the makepom task works
-  Answer: it depends whether you want transitive dependencies excluded or not
-  -->
-  <target name="makepom" depends="ivy-resolve">
-    <ivy:makepom settingsRef="${ant.project.name}.ivy.settings"
-      ivyfile="ivy.xml"
-      pomfile="${build.ivy.maven.dir}/generated.pom">
-      <ivy:mapping conf="default" scope="default"/>
-      <ivy:mapping conf="master" scope="master"/>
-      <ivy:mapping conf="runtime" scope="runtime"/>
-    </ivy:makepom>
-  </target>
-
-
-  <target name="copy-jar-to-maven" depends="ready-to-publish">
-    <copy file="${hadoop-core.jar}"
-      tofile="${build.ivy.maven.jar}"/>
-    <checksum file="${build.ivy.maven.jar}" algorithm="md5"/>
-  </target>
-
-  <target name="copypom" depends="ivy-init-dirs">
-
-   <presetdef name="expandingcopy" >
-    <copy overwrite="true">
-      <filterchain>
-        <expandproperties/>
-      </filterchain>
-    </copy>
-   </presetdef>
-
-   <expandingcopy file="ivy/hadoop-core.pom"
-      tofile="${build.ivy.maven.pom}"/>
-   <checksum file="${build.ivy.maven.pom}" algorithm="md5"/>
-  </target>
-
-  <target name="maven-artifacts" depends="copy-jar-to-maven,copypom" />
-
-  <target name="published" depends="ivy-publish-local,maven-artifacts">
-  </target>
 </project>

Propchange: hadoop/common/branches/HADOOP-6194/ivy/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Sat Nov 28 19:53:33 2009
@@ -1 +1,4 @@
+hadoop-core.xml
+hadoop-core-test.xml
 ivy*.jar
+maven-ant-tasks*.jar