Posted to common-commits@hadoop.apache.org by to...@apache.org on 2011/09/08 03:39:20 UTC

svn commit: r1166495 [1/2] - in /hadoop/common/branches/HDFS-1623/hadoop-common-project: ./ hadoop-auth-examples/ hadoop-auth-examples/src/ hadoop-auth-examples/src/main/ hadoop-auth-examples/src/main/java/ hadoop-auth-examples/src/main/java/org/ hadoo...

Author: todd
Date: Thu Sep  8 01:39:07 2011
New Revision: 1166495

URL: http://svn.apache.org/viewvc?rev=1166495&view=rev
Log:
Merge trunk into HA branch

Added:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/   (props changed)
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/pom.xml
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/pom.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/RequestLoggerFilter.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoServlet.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoServlet.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/resources/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/resources/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/resources/log4j.properties
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/resources/log4j.properties
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/WEB-INF/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/WEB-INF/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/WEB-INF/web.xml
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/WEB-INF/web.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/annonymous/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/annonymous/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/annonymous/index.html
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/annonymous/index.html
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/index.html
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/index.html
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/kerberos/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/kerberos/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/kerberos/index.html
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/kerberos/index.html
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/simple/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/simple/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/src/main/webapp/simple/index.html
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth-examples/src/main/webapp/simple/index.html
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticationException.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticationException.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/PseudoAuthenticator.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/PseudoAuthenticator.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationHandler.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosName.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/SignerException.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/SignerException.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestPseudoAuthenticator.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestPseudoAuthenticator.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationToken.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationToken.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestSigner.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestSigner.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolInfo.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolInfo.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/
      - copied from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
      - copied unchanged from r1166484, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/BUILDING.txt
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/README.txt
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/pom.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/examples/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/alfredo/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/alfredo/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestRunJar.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/pom.xml

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth-examples/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Thu Sep  8 01:39:07 2011
@@ -0,0 +1,5 @@
+.classpath
+.git
+.project
+.settings
+target

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/BUILDING.txt?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/BUILDING.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/BUILDING.txt Thu Sep  8 01:39:07 2011
@@ -1,20 +1,20 @@
 
-Build instructions for Hadoop Alfredo
+Build instructions for Hadoop Auth
 
 Same as for Hadoop.
 
-For more details refer to the Alfredo documentation pages.
+For more details refer to the Hadoop Auth documentation pages.
 
 -----------------------------------------------------------------------------
 Caveats:
 
-* Alfredo has profile to enable Kerberos testcases (testKerberos)
+* Hadoop Auth has profile to enable Kerberos testcases (testKerberos)
 
   To run Kerberos testcases a KDC, 2 kerberos principals and a keytab file
-  are required (refer to the Alfredo documentation pages for details).
+  are required (refer to the Hadoop Auth documentation pages for details).
 
-* Alfredo does not have a distribution profile (dist)
+* Hadoop Auth does not have a distribution profile (dist)
 
-* Alfredo does not have a native code profile (native)
+* Hadoop Auth does not have a native code profile (native)
 
 -----------------------------------------------------------------------------

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/README.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/README.txt?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/README.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/README.txt Thu Sep  8 01:39:07 2011
@@ -1,6 +1,6 @@
-Hadoop Alfredo, Java HTTP SPNEGO
+Hadoop Auth, Java HTTP SPNEGO
 
-Hadoop Alfredo is a Java library consisting of a client and a server
+Hadoop Auth is a Java library consisting of a client and a server
 components to enable Kerberos SPNEGO authentication for HTTP.
 
 The client component is the AuthenticatedURL class.
@@ -10,6 +10,6 @@ The server component is the Authenticati
 Authentication mechanisms support is pluggable in both the client and
 the server components via interfaces.
 
-In addition to Kerberos SPNEGO, Alfredo also supports Pseudo/Simple
+In addition to Kerberos SPNEGO, Hadoop Auth also supports Pseudo/Simple
 authentication (trusting the value of the query string parameter
 'user.name').

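The Pseudo/Simple mode described above needs nothing more than the 'user.name' query
parameter on the request, so any plain HTTP client works. The sketch below uses only
java.net; the host, port and /simple/who path are borrowed from the curl examples later
in this commit and are illustrative only, not code that is part of the commit.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class SimpleAuthGet {
      public static void main(String[] args) throws Exception {
        // Pseudo/Simple auth: the server trusts the 'user.name' query parameter.
        URL url = new URL(
            "http://localhost:8080/hadoop-auth-examples/simple/who?user.name=foo");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);   // the examples webapp replies with a greeting
          }
        }
      }
    }
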
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/pom.xml?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/pom.xml Thu Sep  8 01:39:07 2011
@@ -21,13 +21,12 @@
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
-  <artifactId>hadoop-alfredo</artifactId>
+  <artifactId>hadoop-auth</artifactId>
   <version>0.24.0-SNAPSHOT</version>
   <packaging>jar</packaging>
 
-  <name>Apache Hadoop Alfredo</name>
-  <description>Apache Hadoop Alfredo - Java HTTP SPNEGO</description>
-  <url>http://hadoop.apache.org/alfredo</url>
+  <name>Apache Hadoop Auth</name>
+  <description>Apache Hadoop Auth - Java HTTP SPNEGO</description>
 
   <properties>
     <maven.build.timestamp.format>yyyyMMdd</maven.build.timestamp.format>

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/BuildingIt.apt.vm Thu Sep  8 01:39:07 2011
@@ -11,12 +11,12 @@
 ~~ limitations under the License. See accompanying LICENSE file.
 
   ---
-  Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Building It
+  Hadoop Auth, Java HTTP SPNEGO ${project.version} - Building It
   ---
   ---
   ${maven.build.timestamp}
 
-Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Building It
+Hadoop Auth, Java HTTP SPNEGO ${project.version} - Building It
 
   \[ {{{./index.html}Go Back}} \]
 
@@ -50,14 +50,14 @@ $ mvn test -PtestKerberos
   The following Maven <<<-D>>> options can be used to change the default
   values:
 
-  * <<<alfredo.test.kerberos.realm>>>: default value <<LOCALHOST>>
+  * <<<hadoop-auth.test.kerberos.realm>>>: default value <<LOCALHOST>>
 
-  * <<<alfredo.test.kerberos.client.principal>>>: default value <<client>>
+  * <<<hadoop-auth.test.kerberos.client.principal>>>: default value <<client>>
 
-  * <<<alfredo.test.kerberos.server.principal>>>: default value
+  * <<<hadoop-auth.test.kerberos.server.principal>>>: default value
     <<HTTP/localhost>> (it must start 'HTTP/')
 
-  * <<<alfredo.test.kerberos.keytab.file>>>: default value
+  * <<<hadoop-auth.test.kerberos.keytab.file>>>: default value
     <<${HOME}/${USER}.keytab>>
 
 ** Generating Documentation
@@ -69,7 +69,7 @@ $ mvn package -Pdocs
 +---+
 
   The generated documentation is available at
-  <<<hadoop-alfredo/target/site/>>>.
+  <<<hadoop-auth/target/site/>>>.
 
   \[ {{{./index.html}Go Back}} \]
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Configuration.apt.vm Thu Sep  8 01:39:07 2011
@@ -11,30 +11,30 @@
 ~~ limitations under the License. See accompanying LICENSE file.
 
   ---
-  Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Server Side
+  Hadoop Auth, Java HTTP SPNEGO ${project.version} - Server Side
   Configuration
   ---
   ---
   ${maven.build.timestamp}
 
-Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Server Side
+Hadoop Auth, Java HTTP SPNEGO ${project.version} - Server Side
 Configuration
 
   \[ {{{./index.html}Go Back}} \]
 
 * Server Side Configuration Setup
 
-  The {{{./apidocs/org/apache/hadoop/alfredo/server/AuthenticationFilter.html}
-  AuthenticationFilter filter}} is Alfredo's server side component.
+  The {{{./apidocs/org/apache/hadoop/auth/server/AuthenticationFilter.html}
+  AuthenticationFilter filter}} is Hadoop Auth's server side component.
 
   This filter must be configured in front of all the web application resources
   that required authenticated requests. For example:
 
-  The Alfredo and dependent JAR files must be in the web application classpath
-  (commonly the <<<WEB-INF/lib>>> directory).
+  The Hadoop Auth and dependent JAR files must be in the web application
+  classpath (commonly the <<<WEB-INF/lib>>> directory).
 
-  Alfredo uses SLF4J-API for logging. Alfredo Maven POM dependencies define the
-  SLF4J API dependency but it does not define the dependency on a concrete
+  Hadoop Auth uses SLF4J-API for logging. Auth Maven POM dependencies define
+  the SLF4J API dependency but it does not define the dependency on a concrete
   logging implementation, this must be addded explicitly to the web
   application. For example, if the web applicationan uses Log4j, the
   SLF4J-LOG4J12 and LOG4J jar files must be part part of the web application
@@ -47,7 +47,7 @@ Configuration
 
   * <<<[PREFIX.]type>>>: the authentication type keyword (<<<simple>>> or
     <<<kerberos>>>) or a
-    {{{./apidocs/org/apache/hadoop/alfredo/server/AuthenticationHandler.html}
+    {{{./apidocs/org/apache/hadoop/auth/server/AuthenticationHandler.html}
     Authentication handler implementation}}.
 
   * <<<[PREFIX.]signature.secret>>>: The secret to SHA-sign the generated
@@ -80,7 +80,7 @@ Configuration
 
     * <<<[PREFIX.]kerberos.keytab>>>: The path to the keytab file containing
       the credentials for the kerberos principal. For example:
-      <<</Users/tucu/alfredo.keytab>>>. There is no default value.
+      <<</Users/tucu/tucu.keytab>>>. There is no default value.
 
   <<Example>>:
 
@@ -90,7 +90,7 @@ Configuration
 
     <filter>
         <filter-name>kerberosFilter</filter-name>
-        <filter-class>org.apache.hadoop.alfredo.server.AuthenticationFilter</filter-class>
+        <filter-class>org.apache.hadoop.security.auth.server.AuthenticationFilter</filter-class>
         <init-param>
             <param-name>type</param-name>
             <param-value>kerberos</param-value>
@@ -113,7 +113,7 @@ Configuration
         </init-param>
         <init-param>
             <param-name>kerberos.keytab</param-name>
-            <param-value>/tmp/alfredo.keytab</param-value>
+            <param-value>/tmp/auth.keytab</param-value>
         </init-param>
     </filter>
 
@@ -146,7 +146,7 @@ Configuration
 
     <filter>
         <filter-name>simpleFilter</filter-name>
-        <filter-class>org.apache.hadoop.alfredo.server.AuthenticationFilter</filter-class>
+        <filter-class>org.apache.hadoop.security.auth.server.AuthenticationFilter</filter-class>
         <init-param>
             <param-name>type</param-name>
             <param-value>simple</param-value>

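A resource protected by AuthenticationFilter needs no Hadoop Auth code of its own: the
filter completes the handshake and exposes the caller through the standard servlet API.
The sketch below is written here for illustration, in the spirit of the WhoServlet
example added by this commit, and is not the committed class.

    import java.io.IOException;
    import javax.servlet.ServletException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    // Sits behind AuthenticationFilter; by the time doGet() runs the filter
    // has authenticated the request and populated getRemoteUser().
    public class WhoAmIServlet extends HttpServlet {
      @Override
      protected void doGet(HttpServletRequest req, HttpServletResponse resp)
          throws ServletException, IOException {
        String user = req.getRemoteUser();
        resp.setContentType("text/plain");
        resp.getWriter().println(
            "Hello " + (user != null ? user : "anonymous") + "!");
      }
    }

Which paths require authentication is still decided entirely by the filter-mapping in
web.xml, as in the kerberosFilter and simpleFilter snippets above.
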
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/Examples.apt.vm Thu Sep  8 01:39:07 2011
@@ -11,16 +11,16 @@
 ~~ limitations under the License. See accompanying LICENSE file.
 
   ---
-  Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Examples
+  Hadoop Auth, Java HTTP SPNEGO ${project.version} - Examples
   ---
   ---
   ${maven.build.timestamp}
 
-Hadoop Alfredo, Java HTTP SPNEGO ${project.version} - Examples
+Hadoop Auth, Java HTTP SPNEGO ${project.version} - Examples
 
   \[ {{{./index.html}Go Back}} \]
 
-* Accessing a Alfredo protected URL Using a browser
+* Accessing a Hadoop Auth protected URL Using a browser
 
   <<IMPORTANT:>> The browser must support HTTP Kerberos SPNEGO. For example,
   Firefox or Internet Explorer.
@@ -31,7 +31,7 @@ Hadoop Alfredo, Java HTTP SPNEGO ${proje
   the domain of the web server that is HTTP Kerberos SPNEGO protected (if using
   multiple domains and hostname use comma to separate them).
   
-* Accessing a Alfredo protected URL Using <<<curl>>>
+* Accessing a Hadoop Auth protected URL Using <<<curl>>>
 
   <<IMPORTANT:>> The <<<curl>>> version must support GSS, run <<<curl -V>>>.
 
@@ -48,10 +48,10 @@ Features: GSS-Negotiate IPv6 Largefile N
 +---+
 $ kinit
 Please enter the password for tucu@LOCALHOST:
-$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt http://localhost:8080/alfredo-examples/kerberos/who
+$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt http://localhost:8080/hadoop-auth-examples/kerberos/who
 Enter host password for user 'tucu':
 
-Hello Alfredo!
+Hello Hadoop Auth Examples!
 +---+
 
   * The <<<--negotiate>>> option enables SPNEGO in <<<curl>>>.
@@ -68,7 +68,7 @@ Hello Alfredo!
 
 +---+
 ...
-URL url = new URL("http://localhost:8080/alfredo/kerberos/who");
+URL url = new URL("http://localhost:8080/hadoop-auth/kerberos/who");
 AuthenticatedURL.Token token = new AuthenticatedURL.Token();
 ...
 HttpURLConnection conn = new AuthenticatedURL(url, token).openConnection();
@@ -79,12 +79,12 @@ conn = new AuthenticatedURL(url, token).
 
 * Building and Running the Examples
 
-  Download Alfredo's source code, the examples are in the
+  Download Hadoop-Auth's source code, the examples are in the
   <<<src/main/examples>>> directory.
 
 ** Server Example:
 
-  Edit the <<<src/main/examples/src/main/webapp/WEB-INF/web.xml>>> and set the
+  Edit the <<<hadoop-auth-examples/src/main/webapp/WEB-INF/web.xml>>> and set the
   right configuration init parameters for the <<<AuthenticationFilter>>>
   definition configured for Kerberos (the right Kerberos principal and keytab
   file must be specified). Refer to the {{{./Configuration.html}Configuration
@@ -106,11 +106,11 @@ conn = new AuthenticatedURL(url, token).
 $ kinit
 Please enter the password for tucu@LOCALHOST:
 
-$ curl http://localhost:8080/alfredo-examples/anonymous/who
+$ curl http://localhost:8080/hadoop-auth-examples/anonymous/who
 
-$ curl http://localhost:8080/alfredo-examples/simple/who?user.name=foo
+$ curl http://localhost:8080/hadoop-auth-examples/simple/who?user.name=foo
 
-$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt http://localhost:8080/alfredo-examples/kerberos/who
+$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt http://localhost:8080/hadoop-auth-examples/kerberos/who
 +---+
 
 ** Accessing the server using the Java client example
@@ -121,7 +121,7 @@ Please enter the password for tucu@LOCAL
 
 $ cd examples
 
-$ mvn exec:java -Durl=http://localhost:8080/alfredo-examples/kerberos/who
+$ mvn exec:java -Durl=http://localhost:8080/hadoop-auth-examples/kerberos/who
 
 ....
 

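Filling out the Java client fragment shown in the Examples page above: the sketch below
keeps the AuthenticatedURL calls exactly as that page writes them and only adds the
surrounding plumbing (reading the body, reusing the token for a second request). It is a
sketch, not code from the commit; the AuthenticatedURL class added under
org/apache/hadoop/security/authentication/client in this commit is the authoritative
reference for the exact signatures.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import org.apache.hadoop.security.authentication.client.AuthenticatedURL;

    public class WhoClientSketch {
      public static void main(String[] args) throws Exception {
        // URL and call pattern follow the Examples page above; run kinit first.
        URL url = new URL("http://localhost:8080/hadoop-auth/kerberos/who");
        AuthenticatedURL.Token token = new AuthenticatedURL.Token();

        HttpURLConnection conn = new AuthenticatedURL(url, token).openConnection();
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);
          }
        }

        // The token now carries the signed authentication cookie, so a second
        // request can reuse it without another SPNEGO round trip.
        conn = new AuthenticatedURL(url, token).openConnection();
        System.out.println("second call: HTTP " + conn.getResponseCode());
      }
    }
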
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/apt/index.apt.vm Thu Sep  8 01:39:07 2011
@@ -11,27 +11,27 @@
 ~~ limitations under the License. See accompanying LICENSE file.
 
   ---
-  Hadoop Alfredo, Java HTTP SPNEGO ${project.version}
+  Hadoop Auth, Java HTTP SPNEGO ${project.version}
   ---
   ---
   ${maven.build.timestamp}
 
-Hadoop Alfredo, Java HTTP SPNEGO ${project.version}
+Hadoop Auth, Java HTTP SPNEGO ${project.version}
 
-  Hadoop Alfredo is a Java library consisting of a client and a server
+  Hadoop Auth is a Java library consisting of a client and a server
   components to enable Kerberos SPNEGO authentication for HTTP.
 
-  Alfredo also supports additional authentication mechanisms on the client
+  Hadoop Auth also supports additional authentication mechanisms on the client
   and the server side via 2 simple interfaces.
 
 * License
 
-  Alfredo is distributed under {{{http://www.apache.org/licenses/}Apache
+  Hadoop Auth is distributed under {{{http://www.apache.org/licenses/}Apache
   License 2.0}}.
 
-* How Does Alfredo Works?
+* How Does Auth Works?
 
-  Alfredo enforces authentication on protected resources, once authentiation
+  Hadoop Auth enforces authentication on protected resources, once authentiation
   has been established it sets a signed HTTP Cookie that contains an
   authentication token with the user name, user principal, authentication type
   and expiration time.

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/site/site.xml Thu Sep  8 01:39:07 2011
@@ -11,7 +11,7 @@
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
 -->
-<project name="Hadoop Alfredo">
+<project name="Hadoop Auth">
 
   <version position="right"/>
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt Thu Sep  8 01:39:07 2011
@@ -2,6 +2,16 @@ Hadoop Change Log
 
 Trunk (unreleased changes)
 
+  IMPROVEMENTS
+
+    HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
+  HADOOP-7524 Change RPC to allow multiple protocols including multuple versions of the same protocol (sanjay Radia)
+
+  BUGS
+
+    HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
+                 by Jersey (Alejandro Abdelnur via atm)
+
 Release 0.23.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -347,6 +357,20 @@ Release 0.23.0 - Unreleased
     HADOOP-7547. Add generic type in WritableComparable subclasses.
     (Uma Maheswara Rao G via szetszwo)
 
+    HADOOP-7579. Rename package names from alfredo to auth.
+    (Alejandro Abdelnur via szetszwo)
+
+    HADOOP-7594. Support HTTP REST in HttpServer.  (szetszwo)
+
+    HADOOP-7552. FileUtil#fullyDelete doesn't throw IOE but lists it
+    in the throws clause. (eli)
+
+    HADOOP-7580. Add a version of getLocalPathForWrite to LocalDirAllocator
+    which doesn't create dirs. (Chris Douglas & Siddharth Seth via acmurthy) 
+
+    HADOOP-7507. Allow ganglia metrics to include the metrics system tags
+                 in the gmetric names. (Alejandro Abdelnur via todd)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@@ -533,6 +557,12 @@ Release 0.23.0 - Unreleased
     HADOOP-7560. Change src layout to be heirarchical. (Alejandro Abdelnur
     via acmurthy)
 
+    HADOOP-7576. Fix findbugs warnings and javac warnings in hadoop-auth.
+    (szetszwo)
+
+    HADOOP-7593. Fix AssertionError in TestHttpServer.testMaxThreads().
+    (Uma Maheswara Rao G via szetszwo)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep  8 01:39:07 2011
@@ -1,4 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1166484
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml Thu Sep  8 01:39:07 2011
@@ -92,6 +92,28 @@
       <artifactId>jetty-util</artifactId>
       <scope>compile</scope>
     </dependency>
+
+    <dependency>
+      <groupId>asm</groupId>
+      <artifactId>asm</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-core</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-json</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-server</artifactId>
+      <scope>compile</scope>
+    </dependency>
+
     <dependency>
       <groupId>tomcat</groupId>
       <artifactId>jasper-compiler</artifactId>
@@ -239,7 +261,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-alfredo</artifactId>
+      <artifactId>hadoop-auth</artifactId>
       <scope>compile</scope>
     </dependency>
   </dependencies>

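The new asm and jersey-* dependencies back the HTTP REST support added to HttpServer
(HADOOP-7594; see the JerseyResource.java test resource in the Added list above). As a
rough idea of what such a resource looks like, here is a plain JAX-RS (Jersey 1.x) class
written for illustration only; the path and parameter name are made up and it is not the
class from the commit.

    import javax.ws.rs.DefaultValue;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.QueryParam;
    import javax.ws.rs.core.MediaType;

    // A minimal JAX-RS resource of the kind the jersey-* dependencies enable.
    @Path("/echo")
    public class EchoResource {
      @GET
      @Produces(MediaType.APPLICATION_JSON)
      public String echo(@DefaultValue("ping") @QueryParam("op") String op) {
        // Hand-rolled JSON keeps the sketch independent of any JSON provider.
        return "{\"op\":\"" + op + "\"}";
      }
    }
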
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/hadoop-metrics2.properties Thu Sep  8 01:39:07 2011
@@ -43,6 +43,16 @@
 #*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
 #*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
 
+# Tag values to use for the ganglia prefix. If not defined no tags are used.
+# If '*' all tags are used. If specifiying multiple tags separate them with 
+# commas. Note that the last segment of the property name is the context name.
+#
+#*.sink.ganglia.tagsForPrefix.jvm=ProcesName
+#*.sink.ganglia.tagsForPrefix.dfs=
+#*.sink.ganglia.tagsForPrefix.rpc=
+#*.sink.ganglia.tagsForPrefix.mapred=
+#*.sink.ganglia.tagsForPrefix.fairscheduler=
+
 #namenode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649
 
 #datanode.sink.ganglia.servers=yourgangliahost_1:8649,yourgangliahost_2:8649

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties Thu Sep  8 01:39:07 2011
@@ -149,3 +149,25 @@ log4j.additivity.org.apache.hadoop.mapre
 #log4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd
 #log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
 #log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Yarn ResourceManager Application Summary Log 
+#
+# Set the ResourceManager summary log filename
+#yarn.server.resourcemanager.appsummary.log.file=rm-appsummary.log
+# Set the ResourceManager summary log level and appender
+#yarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY
+
+# Appender for ResourceManager Application Summary Log - rolled daily
+# Requires the following properties to be set
+#    - hadoop.log.dir (Hadoop Log directory)
+#    - yarn.server.resourcemanager.appsummary.log.file (resource manager app summary log filename)
+#    - yarn.server.resourcemanager.appsummary.logger (resource manager app summary log level and appender)
+
+#log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=${yarn.server.resourcemanager.appsummary.logger}
+#log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=false
+#log4j.appender.RMSUMMARY=org.apache.log4j.DailyRollingFileAppender
+#log4j.appender.RMSUMMARY.File=${hadoop.log.dir}/${yarn.server.resourcemanager.appsummary.log.file}
+#log4j.appender.RMSUMMARY.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RMSUMMARY.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
+#log4j.appender.RMSUMMARY.DatePattern=.yyyy-MM-dd

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep  8 01:39:07 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1162221
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1166484
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep  8 01:39:07 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1162221
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1166484
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java Thu Sep  8 01:39:07 2011
@@ -28,7 +28,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 
@@ -88,7 +87,7 @@ public class FileUtil {
    * (4) If dir is a normal directory, then dir and all its contents recursively
    *     are deleted.
    */
-  public static boolean fullyDelete(File dir) throws IOException {
+  public static boolean fullyDelete(File dir) {
     if (dir.delete()) {
       // dir is (a) normal file, (b) symlink to a file, (c) empty directory or
       // (d) symlink to a directory
@@ -108,7 +107,7 @@ public class FileUtil {
    * If dir is a symlink to a directory, all the contents of the actual
    * directory pointed to by dir will be deleted.
    */
-  public static boolean fullyDeleteContents(File dir) throws IOException {
+  public static boolean fullyDeleteContents(File dir) {
     boolean deletionSucceeded = true;
     File contents[] = dir.listFiles();
     if (contents != null) {

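The FileUtil hunk above is HADOOP-7552 from the CHANGES.txt section: fullyDelete() and
fullyDeleteContents() never actually threw IOException, so the throws clause is removed
and callers only need to check the boolean result. A minimal caller sketch (the path and
error handling are illustrative, not from the commit):

    import java.io.File;
    import org.apache.hadoop.fs.FileUtil;

    public class CleanupExample {
      public static void main(String[] args) {
        File dir = new File("/tmp/scratch-dir");   // illustrative path

        // No IOException is declared any more, so no try/catch is needed;
        // failure is reported purely through the boolean return value.
        if (!FileUtil.fullyDelete(dir)) {
          System.err.println("Could not fully delete " + dir);
        }
      }
    }
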
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java Thu Sep  8 01:39:07 2011
@@ -128,8 +128,26 @@ public class LocalDirAllocator {
    */
   public Path getLocalPathForWrite(String pathStr, long size, 
       Configuration conf) throws IOException {
+    return getLocalPathForWrite(pathStr, size, conf, true);
+  }
+  
+  /** Get a path from the local FS. Pass size as 
+   *  SIZE_UNKNOWN if not known apriori. We
+   *  round-robin over the set of disks (via the configured dirs) and return
+   *  the first complete path which has enough space 
+   *  @param pathStr the requested path (this will be created on the first 
+   *  available disk)
+   *  @param size the size of the file that is going to be written
+   *  @param conf the Configuration object
+   *  @param checkWrite ensure that the path is writable
+   *  @return the complete path to the file on a local disk
+   *  @throws IOException
+   */
+  public Path getLocalPathForWrite(String pathStr, long size, 
+                                   Configuration conf,
+                                   boolean checkWrite) throws IOException {
     AllocatorPerContext context = obtainContext(contextCfgItemName);
-    return context.getLocalPathForWrite(pathStr, size, conf);
+    return context.getLocalPathForWrite(pathStr, size, conf, checkWrite);
   }
   
   /** Get a path from the local FS for reading. We search through all the
@@ -145,6 +163,23 @@ public class LocalDirAllocator {
     AllocatorPerContext context = obtainContext(contextCfgItemName);
     return context.getLocalPathToRead(pathStr, conf);
   }
+  
+  /**
+   * Get all of the paths that currently exist in the working directories.
+   * @param pathStr the path underneath the roots
+   * @param conf the configuration to look up the roots in
+   * @return all of the paths that exist under any of the roots
+   * @throws IOException
+   */
+  public Iterable<Path> getAllLocalPathsToRead(String pathStr, 
+                                               Configuration conf
+                                               ) throws IOException {
+    AllocatorPerContext context;
+    synchronized (this) {
+      context = obtainContext(contextCfgItemName);
+    }
+    return context.getAllLocalPathsToRead(pathStr, conf);    
+  }
 
   /** Creates a temporary file in the local FS. Pass size as -1 if not known 
    *  apriori. We round-robin over the set of disks (via the configured dirs) 
@@ -214,7 +249,8 @@ public class LocalDirAllocator {
     /** This method gets called everytime before any read/write to make sure
      * that any change to localDirs is reflected immediately.
      */
-    private void confChanged(Configuration conf) throws IOException {
+    private synchronized void confChanged(Configuration conf) 
+        throws IOException {
       String newLocalDirs = conf.get(contextCfgItemName);
       if (!newLocalDirs.equals(savedLocalDirs)) {
         localDirs = conf.getTrimmedStrings(contextCfgItemName);
@@ -251,18 +287,22 @@ public class LocalDirAllocator {
       }
     }
 
-    private Path createPath(String path) throws IOException {
+    private Path createPath(String path, 
+        boolean checkWrite) throws IOException {
       Path file = new Path(new Path(localDirs[dirNumLastAccessed]),
                                     path);
-      //check whether we are able to create a directory here. If the disk
-      //happens to be RDONLY we will fail
-      try {
-        DiskChecker.checkDir(new File(file.getParent().toUri().getPath()));
-        return file;
-      } catch (DiskErrorException d) {
-        LOG.warn("Disk Error Exception: ", d);
-        return null;
+      if (checkWrite) {
+        //check whether we are able to create a directory here. If the disk
+        //happens to be RDONLY we will fail
+        try {
+          DiskChecker.checkDir(new File(file.getParent().toUri().getPath()));
+          return file;
+        } catch (DiskErrorException d) {
+          LOG.warn("Disk Error Exception: ", d);
+          return null;
+        }
       }
+      return file;
     }
 
     /**
@@ -272,17 +312,6 @@ public class LocalDirAllocator {
     int getCurrentDirectoryIndex() {
       return dirNumLastAccessed;
     }
-    
-    /** Get a path from the local FS. This method should be used if the size of 
-     *  the file is not known a priori. 
-     *  
-     *  It will use roulette selection, picking directories
-     *  with probability proportional to their available space. 
-     */
-    public synchronized Path getLocalPathForWrite(String path, 
-        Configuration conf) throws IOException {
-      return getLocalPathForWrite(path, SIZE_UNKNOWN, conf);
-    }
 
     /** Get a path from the local FS. If size is known, we go
      *  round-robin over the set of disks (via the configured dirs) and return
@@ -292,7 +321,7 @@ public class LocalDirAllocator {
      *  with probability proportional to their available space.
      */
     public synchronized Path getLocalPathForWrite(String pathStr, long size, 
-        Configuration conf) throws IOException {
+        Configuration conf, boolean checkWrite) throws IOException {
       confChanged(conf);
       int numDirs = localDirs.length;
       int numDirsSearched = 0;
@@ -324,7 +353,7 @@ public class LocalDirAllocator {
             dir++;
           }
           dirNumLastAccessed = dir;
-          returnPath = createPath(pathStr);
+          returnPath = createPath(pathStr, checkWrite);
           if (returnPath == null) {
             totalAvailable -= availableOnDisk[dir];
             availableOnDisk[dir] = 0; // skip this disk
@@ -335,7 +364,7 @@ public class LocalDirAllocator {
         while (numDirsSearched < numDirs && returnPath == null) {
           long capacity = dirDF[dirNumLastAccessed].getAvailable();
           if (capacity > size) {
-            returnPath = createPath(pathStr);
+            returnPath = createPath(pathStr, checkWrite);
           }
           dirNumLastAccessed++;
           dirNumLastAccessed = dirNumLastAccessed % numDirs; 
@@ -361,7 +390,7 @@ public class LocalDirAllocator {
         Configuration conf) throws IOException {
 
       // find an appropriate directory
-      Path path = getLocalPathForWrite(pathStr, size, conf);
+      Path path = getLocalPathForWrite(pathStr, size, conf, true);
       File dir = new File(path.getParent().toUri().getPath());
       String prefix = path.getName();
 
@@ -398,6 +427,74 @@ public class LocalDirAllocator {
       " the configured local directories");
     }
 
+    private static class PathIterator implements Iterator<Path>, Iterable<Path> {
+      private final FileSystem fs;
+      private final String pathStr;
+      private int i = 0;
+      private final String[] rootDirs;
+      private Path next = null;
+
+      private PathIterator(FileSystem fs, String pathStr, String[] rootDirs)
+          throws IOException {
+        this.fs = fs;
+        this.pathStr = pathStr;
+        this.rootDirs = rootDirs;
+        advance();
+      }
+
+      @Override
+      public boolean hasNext() {
+        return next != null;
+      }
+
+      private void advance() throws IOException {
+        while (i < rootDirs.length) {
+          next = new Path(rootDirs[i++], pathStr);
+          if (fs.exists(next)) {
+            return;
+          }
+        }
+        next = null;
+      }
+
+      @Override
+      public Path next() {
+        Path result = next;
+        try {
+          advance();
+        } catch (IOException ie) {
+          throw new RuntimeException("Can't check existence of " + next, ie);
+        }
+        return result;
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException("read only iterator");
+      }
+
+      @Override
+      public Iterator<Path> iterator() {
+        return this;
+      }
+    }
+
+    /**
+     * Get all of the paths that currently exist in the working directories.
+     * @param pathStr the path underneath the roots
+     * @param conf the configuration to look up the roots in
+     * @return all of the paths that exist under any of the roots
+     * @throws IOException
+     */
+    synchronized Iterable<Path> getAllLocalPathsToRead(String pathStr,
+        Configuration conf) throws IOException {
+      confChanged(conf);
+      if (pathStr.startsWith("/")) {
+        pathStr = pathStr.substring(1);
+      }
+      return new PathIterator(localFS, pathStr, localDirs);
+    }
+
     /** We search through all the configured dirs for the file's existence
      *  and return true when we find one 
      */

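A minimal sketch of the two LocalDirAllocator additions above, getLocalPathForWrite(..., checkWrite) and getAllLocalPathsToRead(...); the configuration key "test.local.dirs" and the paths are hypothetical, not part of this commit:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;

public class LocalDirAllocatorExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.set("test.local.dirs", "/tmp/local1,/tmp/local2");
    LocalDirAllocator alloc = new LocalDirAllocator("test.local.dirs");

    // checkWrite=false skips the DiskChecker probe of the parent directory,
    // so the returned path is chosen purely by the round-robin/space logic.
    Path out = alloc.getLocalPathForWrite("spill/part-0", 1024L, conf, false);
    System.out.println("would write to " + out);

    // Enumerate every copy of a relative path that already exists under
    // any of the configured roots.
    for (Path p : alloc.getAllLocalPathsToRead("spill", conf)) {
      System.out.println("found " + p);
    }
  }
}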
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java Thu Sep  8 01:39:07 2011
@@ -228,10 +228,10 @@ public class RawLocalFileSystem extends 
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     if (!exists(f)) {
-      throw new FileNotFoundException("File " + f + " not found.");
+      throw new FileNotFoundException("File " + f + " not found");
     }
     if (getFileStatus(f).isDirectory()) {
-      throw new IOException("Cannot append to a diretory (=" + f + " ).");
+      throw new IOException("Cannot append to a directory (=" + f + " )");
     }
     return new FSDataOutputStream(new BufferedOutputStream(
         new LocalFSFileOutputStream(f, true), bufferSize), statistics);
@@ -242,7 +242,7 @@ public class RawLocalFileSystem extends 
     short replication, long blockSize, Progressable progress)
     throws IOException {
     if (exists(f) && !overwrite) {
-      throw new IOException("File already exists:"+f);
+      throw new IOException("File already exists: "+f);
     }
     Path parent = f.getParent();
     if (parent != null && !mkdirs(parent)) {
@@ -271,11 +271,18 @@ public class RawLocalFileSystem extends 
     return FileUtil.copy(this, src, this, dst, true, getConf());
   }
   
+  /**
+   * Delete the given path to a file or directory.
+   * @param p the path to delete
+   * @param recursive to delete sub-directories
+   * @return true if the file or directory and all its contents were deleted
+   * @throws IOException if p is non-empty and recursive is false 
+   */
   public boolean delete(Path p, boolean recursive) throws IOException {
     File f = pathToFile(p);
     if (f.isFile()) {
       return f.delete();
-    } else if ((!recursive) && f.isDirectory() && 
+    } else if (!recursive && f.isDirectory() && 
         (FileUtil.listFiles(f).length != 0)) {
       throw new IOException("Directory " + f.toString() + " is not empty");
     }
@@ -287,7 +294,7 @@ public class RawLocalFileSystem extends 
     FileStatus[] results;
 
     if (!localf.exists()) {
-      throw new FileNotFoundException("File " + f + " does not exist.");
+      throw new FileNotFoundException("File " + f + " does not exist");
     }
     if (localf.isFile()) {
       return new FileStatus[] {
@@ -421,7 +428,7 @@ public class RawLocalFileSystem extends 
     if (path.exists()) {
       return new RawLocalFileStatus(pathToFile(f), getDefaultBlockSize(), this);
     } else {
-      throw new FileNotFoundException("File " + f + " does not exist.");
+      throw new FileNotFoundException("File " + f + " does not exist");
     }
   }
 

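A small sketch of the delete(Path, boolean) contract documented above: a non-empty directory is rejected unless recursive is true. The demo class and the /tmp path are hypothetical:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteContractExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf).getRawFileSystem();

    Path dir = new Path("/tmp/delete-contract-demo");   // hypothetical path
    fs.mkdirs(dir);
    fs.create(new Path(dir, "child")).close();

    try {
      fs.delete(dir, false);              // non-empty + non-recursive
    } catch (IOException expected) {
      System.out.println("rejected as documented: " + expected.getMessage());
    }
    System.out.println("recursive delete: " + fs.delete(dir, true));
  }
}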
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Thu Sep  8 01:39:07 2011
@@ -48,16 +48,12 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.ConfServlet;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.http.AdminAuthorizedServlet;
-import org.apache.hadoop.http.FilterContainer;
-import org.apache.hadoop.http.FilterInitializer;
-import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.jmx.JMXJsonServlet;
 import org.apache.hadoop.log.LogLevel;
 import org.apache.hadoop.metrics.MetricsServlet;
 import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector.MODE;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.mortbay.io.Buffer;
@@ -79,6 +75,8 @@ import org.mortbay.jetty.webapp.WebAppCo
 import org.mortbay.thread.QueuedThreadPool;
 import org.mortbay.util.MultiException;
 
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+
 /**
  * Create a Jetty embedded server to answer http requests. The primary goal
  * is to serve up status information for the server.
@@ -178,7 +176,7 @@ public class HttpServer implements Filte
 
     int maxThreads = conf.getInt(HTTP_MAX_THREADS, -1);
     // If HTTP_MAX_THREADS is not configured, QueueThreadPool() will use the
-    // default value (currently 254).
+    // default value (currently 250).
     QueuedThreadPool threadPool = maxThreads == -1 ?
         new QueuedThreadPool() : new QueuedThreadPool(maxThreads);
     webServer.setThreadPool(threadPool);
@@ -325,6 +323,22 @@ public class HttpServer implements Filte
     webAppContext.setAttribute(name, value);
   }
 
+  /** 
+   * Add a Jersey resource package.
+   * @param packageName The Java package name containing the Jersey resource.
+   * @param pathSpec The path spec for the servlet
+   */
+  public void addJerseyResourcePackage(final String packageName,
+      final String pathSpec) {
+    LOG.info("addJerseyResourcePackage: packageName=" + packageName
+        + ", pathSpec=" + pathSpec);
+    final ServletHolder sh = new ServletHolder(ServletContainer.class);
+    sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
+        "com.sun.jersey.api.core.PackagesResourceConfig");
+    sh.setInitParameter("com.sun.jersey.config.property.packages", packageName);
+    webAppContext.addServlet(sh, pathSpec);
+  }
+
   /**
    * Add a servlet in the server.
    * @param name The name of the servlet (can be passed as null)

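A rough sketch of how a daemon could use the new addJerseyResourcePackage() hook; the resource package, path spec, and port below are hypothetical, and the HttpServer constructor follows the five-argument form assumed to be available on this branch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;

public class JerseyWebappExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    HttpServer server = new HttpServer("demo", "0.0.0.0", 0, true, conf);

    // Every JAX-RS resource class in this package gets served under /api/*.
    server.addJerseyResourcePackage(
        "org.example.rest.resources",    // hypothetical resource package
        "/api/*");

    server.start();
  }
}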
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Thu Sep  8 01:39:07 2011
@@ -285,8 +285,8 @@ public class Client {
         authMethod = AuthMethod.KERBEROS;
       }
       
-      header = new ConnectionHeader(protocol == null ? null : protocol
-          .getName(), ticket, authMethod);
+      header = 
+        new ConnectionHeader(RPC.getProtocolName(protocol), ticket, authMethod);
       
       if (LOG.isDebugEnabled())
         LOG.debug("Use " + authMethod + " authentication for protocol "

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Thu Sep  8 01:39:07 2011
@@ -62,6 +62,20 @@ import org.apache.hadoop.util.Reflection
  */
 public class RPC {
   static final Log LOG = LogFactory.getLog(RPC.class);
+  
+  
+  /**
+   * Get the protocol name.
+   *  If the protocol class has a ProtocolInfo annotation, then get the protocol
+   *  name from the annotation; otherwise the class name is the protocol name.
+   */
+  static public String getProtocolName(Class<?> protocol) {
+    if (protocol == null) {
+      return null;
+    }
+    ProtocolInfo anno = (ProtocolInfo) protocol.getAnnotation(ProtocolInfo.class);
+    return  (anno == null) ? protocol.getName() : anno.protocolName();
+  }
 
   private RPC() {}                                  // no public ctor
 
@@ -553,8 +567,10 @@ public class RPC {
   }
 
   /** Construct a server for a protocol implementation instance. */
-  public static Server getServer(Class<?> protocol,
-                                 Object instance, String bindAddress, int port,
+
+  public static <PROTO extends VersionedProtocol, IMPL extends PROTO> 
+        Server getServer(Class<PROTO> protocol,
+                                 IMPL instance, String bindAddress, int port,
                                  int numHandlers, int numReaders, int queueSizePerHandler,
                                  boolean verbose, Configuration conf,
                                  SecretManager<? extends TokenIdentifier> secretManager) 
@@ -576,6 +592,18 @@ public class RPC {
       super(bindAddress, port, paramClass, handlerCount, numReaders, queueSizePerHandler,
             conf, serverName, secretManager);
     }
+    
+    /**
+     * Add a protocol to the existing server.
+     * @param protocolClass - the protocol class
+     * @param protocolImpl - the impl of the protocol that will be called
+     * @return the server (for convenience)
+     */
+    public <PROTO extends VersionedProtocol, IMPL extends PROTO>
+      Server addProtocol(Class<PROTO> protocolClass, IMPL protocolImpl
+    ) throws IOException {
+      throw new IOException("addProtocol Not Implemented");
+    }
   }
 
 }

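A minimal sketch of the naming rule implemented by the new RPC.getProtocolName(): a ProtocolInfo annotation wins, otherwise the fully qualified class name is used. Both interfaces below are hypothetical examples, not Hadoop protocols:

import org.apache.hadoop.ipc.ProtocolInfo;
import org.apache.hadoop.ipc.RPC;

public class ProtocolNameExample {

  @ProtocolInfo(protocolName = "org.example.DemoProtocol")
  interface AnnotatedProtocol {}

  interface PlainProtocol {}

  public static void main(String[] args) {
    // Prints the annotation value: org.example.DemoProtocol
    System.out.println(RPC.getProtocolName(AnnotatedProtocol.class));
    // Falls back to the fully qualified class name of PlainProtocol.
    System.out.println(RPC.getProtocolName(PlainProtocol.class));
  }
}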
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Thu Sep  8 01:39:07 2011
@@ -900,7 +900,7 @@ public abstract class Server {
     private InetAddress addr;
     
     ConnectionHeader header = new ConnectionHeader();
-    Class<?> protocol;
+    String protocolName;
     boolean useSasl;
     SaslServer saslServer;
     private AuthMethod authMethod;
@@ -1287,15 +1287,8 @@ public abstract class Server {
       DataInputStream in =
         new DataInputStream(new ByteArrayInputStream(buf));
       header.readFields(in);
-      try {
-        String protocolClassName = header.getProtocol();
-        if (protocolClassName != null) {
-          protocol = getProtocolClass(header.getProtocol(), conf);
-          rpcDetailedMetrics.init(protocol);
-        }
-      } catch (ClassNotFoundException cnfe) {
-        throw new IOException("Unknown protocol: " + header.getProtocol());
-      }
+      protocolName = header.getProtocol();
+
       
       UserGroupInformation protocolUser = header.getUgi();
       if (!useSasl) {
@@ -1484,7 +1477,7 @@ public abstract class Server {
             // Make the call as the user via Subject.doAs, thus associating
             // the call with the Subject
             if (call.connection.user == null) {
-              value = call(call.connection.protocol, call.param, 
+              value = call(call.connection.protocolName, call.param, 
                            call.timestamp);
             } else {
               value = 
@@ -1493,7 +1486,7 @@ public abstract class Server {
                      @Override
                      public Writable run() throws Exception {
                        // make the call
-                       return call(call.connection.protocol, 
+                       return call(call.connection.protocolName, 
                                    call.param, call.timestamp);
 
                      }
@@ -1753,7 +1746,7 @@ public abstract class Server {
   
   /** 
    * Called for each call. 
-   * @deprecated Use {@link #call(Class, Writable, long)} instead
+   * @deprecated Use {@link #call(String, Writable, long)} instead
    */
   @Deprecated
   public Writable call(Writable param, long receiveTime) throws IOException {
@@ -1761,7 +1754,7 @@ public abstract class Server {
   }
   
   /** Called for each call. */
-  public abstract Writable call(Class<?> protocol,
+  public abstract Writable call(String protocol,
                                Writable param, long receiveTime)
   throws IOException;
   

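A toy illustration of the new abstract call(String, Writable, long) signature: subclasses now receive the protocol name from the ConnectionHeader instead of a resolved Class. The EchoServer below is hypothetical and not how RPC.Server dispatches calls; it assumes the five-argument Server constructor available on this branch:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.Server;

public class EchoServer extends Server {
  public EchoServer(Configuration conf) throws IOException {
    super("0.0.0.0", 0, Text.class, 1, conf);
  }

  @Override
  public Writable call(String protocol, Writable param, long receiveTime)
      throws IOException {
    // No class loading here any more; unknown protocols can simply be rejected.
    if (!"org.example.EchoProtocol".equals(protocol)) {  // hypothetical name
      throw new IOException("Unknown protocol: " + protocol);
    }
    return param;   // echo the request back
  }
}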
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java?rev=1166495&r1=1166494&r2=1166495&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java Thu Sep  8 01:39:07 2011
@@ -34,7 +34,6 @@ public interface VersionedProtocol {
    * @return the version that the server will speak
    * @throws IOException if any IO error occurs
    */
-  @Deprecated
   public long getProtocolVersion(String protocol,
                                  long clientVersion) throws IOException;