Posted to common-commits@hadoop.apache.org by gk...@apache.org on 2012/08/03 21:00:44 UTC

svn commit: r1369164 [1/4] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ dev-support/ src/ src/main/bin/ src/main/conf/ src/main/docs/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/java/o...

Author: gkesavan
Date: Fri Aug  3 19:00:15 2012
New Revision: 1369164

URL: http://svn.apache.org/viewvc?rev=1369164&view=rev
Log:
merge -r1358479:HEAD http://svn.apache.org/repos/asf/hadoop/common/trunk

Added:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableQuantiles.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableQuantiles.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/Quantile.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/Quantile.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/SampleQuantiles.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/
      - copied from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/KeyStoresFactory.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/KeyStoresFactory.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Time.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestSampleQuantiles.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/
      - copied from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/KeyStoreTestUtil.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/KeyStoreTestUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestReloadingX509TrustManager.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestReloadingX509TrustManager.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java
      - copied unchanged from r1369130, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java
Removed:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/aop/build/
Modified:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-client.xml.example
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-server.xml.example
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/file/FileContext.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NoEmitMetricsContext.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContext.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsLongValue.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsRecordBuilderImpl.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progressable.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/Timer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestMultithreadedTestUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt Fri Aug  3 19:00:15 2012
@@ -88,6 +88,9 @@ Trunk (unreleased changes)
     HADOOP-8523. test-patch.sh doesn't validate patches before building
     (Jack Dintruff via jeagles)
 
+    HADOOP-8624. ProtobufRpcEngine should log all RPCs if TRACE logging is
+    enabled (todd)
+
   BUG FIXES
 
     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -172,6 +175,18 @@ Trunk (unreleased changes)
 
     HADOOP-8110. Fix trash checkpoint collisions (Jason Lowe via daryn)
 
+    HADOOP-8584. test-patch.sh should not immediately exit when no
+    tests are added or modified. (Colin Patrick McCabe via eli)
+
+    HADOOP-8521. Port StreamInputFormat to new Map Reduce API (madhukara
+    phatak via bobby)
+
+    HADOOP-8593. Add missed @Override annotations in Metric/Metrics2 package.
+    (Brandon Li via suresh)
+
+    HADOOP-8623. hadoop jar command should respect HADOOP_OPTS.
+    (Steven Willis via suresh)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -242,6 +257,29 @@ Branch-2 ( Unreleased changes )
     HADOOP-8533. Remove the unused parallel call capability in RPC.
     (Brandon Li via suresh)
 
+    HADOOP-8423. MapFile.Reader.get() crashes jvm or throws
+    EOFException on Snappy or LZO block-compressed data
+    (todd via harsh)
+
+    HADOOP-8541. Better high-percentile latency metrics. (Andrew Wang via atm)
+
+    HADOOP-8362. Improve exception message when Configuration.set() is
+    called with a null key or value. (Madhukara Phatak
+    and Suresh Srinivas via harsh)
+
+    HADOOP-7818. DiskChecker#checkDir should fail if the directory is
+    not executable. (Madhukara Phatak via harsh)
+
+    HADOOP-8531. SequenceFile Writer can throw out a better error if a
+    serializer or deserializer isn't available
+    (Madhukara Phatak via harsh)
+
+    HADOOP-8609. IPC server logs a useless message when shutting down socket.
+    (Jon Zuanich via atm)
+
+    HADOOP-8620. Add -Drequire.fuse and -Drequire.snappy. (Colin
+    Patrick McCabe via eli)
+
   BUG FIXES
 
     HADOOP-8372. NetUtils.normalizeHostName() incorrectly handles hostname
@@ -316,6 +354,29 @@ Branch-2 ( Unreleased changes )
     HADOOP-8566. AvroReflectSerializer.accept(Class) throws a NPE if the class has no 
     package (primitive types and arrays). (tucu)
 
+    HADOOP-8586. Fixup a bunch of SPNEGO misspellings. (eli)
+
+    HADOOP-3886. Error in javadoc of Reporter, Mapper and Progressable
+    (Jingguo Yao via harsh)
+
+    HADOOP-8587. HarFileSystem access of harMetaCache isn't threadsafe. (eli)
+
+    HADOOP-8585. Fix initialization circularity between UserGroupInformation
+    and HadoopConfiguration. (Colin Patrick McCabe via atm)
+
+    HADOOP-8552. Conflict: Same security.log.file for multiple users. 
+    (kkambatl via tucu)
+
+    HADOOP-8537. Fix TFile tests to pass even when native zlib support is not
+    compiled. (todd)
+
+    HADOOP-8626. Typo in default setting for
+    hadoop.security.group.mapping.ldap.search.filter.user. (Jonathan Natkins
+    via atm)
+
+    HADOOP-8480. The native build should honor -DskipTests.
+    (Colin Patrick McCabe via eli)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
     HADOOP-8220. ZKFailoverController doesn't handle failure to become active
@@ -731,6 +792,8 @@ Release 0.23.3 - UNRELEASED
 
     HADOOP-8535. Cut hadoop build times in half (Job Eagles via bobby)
 
+    HADOOP-8525. Provide Improved Traceability for Configuration (bobby)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -785,6 +848,40 @@ Release 0.23.3 - UNRELEASED
     HADOOP-8129. ViewFileSystemTestSetup setupForViewFileSystem is erring
     (Ahmed Radwan and Ravi Prakash via bobby)
 
+    HADOOP-8573. Configuration tries to read from an inputstream resource 
+    multiple times (Robert Evans via tgraves)
+
+    HADOOP-8599. Non empty response from FileSystem.getFileBlockLocations when
+    asking for data beyond the end of file. (Andrey Klochkov via todd)
+
+    HADOOP-8606. FileSystem.get may return the wrong filesystem (Daryn Sharp
+    via bobby)
+
+    HADOOP-8551. fs -mkdir creates parent directories without the -p option
+    (John George via bobby)
+
+    HADOOP-8613. AbstractDelegationTokenIdentifier#getUser() should set token
+    auth type. (daryn)
+
+    HADOOP-8627. FS deleteOnExit may delete the wrong path (daryn via bobby)
+
+    HADOOP-8634. Ensure FileSystem#close doesn't squawk for deleteOnExit paths 
+    (daryn via bobby)
+
+    HADOOP-8550. hadoop fs -touchz automatically created parent directories
+    (John George via bobby)
+
+    HADOOP-8635. Cannot cancel paths registered deleteOnExit (daryn via bobby)
+
+    HADOOP-8637. FilterFileSystem#setWriteChecksum is broken (daryn via bobby)
+
+    HADOOP-8370. Native build failure: javah: class file for 
+    org.apache.hadoop.classification.InterfaceAudience not found  (Trevor
+    Robinson via tgraves)
+
+    HADOOP-8633. Interrupted FsShell copies may leave tmp files (Daryn Sharp
+    via tgraves)
+
 Release 0.23.2 - UNRELEASED 
 
   INCOMPATIBLE CHANGES
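
Among the Branch-2 entries above, HADOOP-8541 ("Better high-percentile
latency metrics") is the one backed by the new MutableQuantiles and
SampleQuantiles classes in the "Added" list at the top of this commit. A
minimal sketch of how a metrics source might use them (the class, metric,
and unit names below are illustrative, not part of this patch):

    import org.apache.hadoop.metrics2.lib.MetricsRegistry;
    import org.apache.hadoop.metrics2.lib.MutableQuantiles;

    public class RpcLatencySource {
      private final MetricsRegistry registry = new MetricsRegistry("RpcLatency");
      // Estimates the 50/75/90/95/99th percentiles over a rolling 60s window.
      private final MutableQuantiles rpcTime = registry.newQuantiles(
          "rpcTime60s", "RPC processing time", "ops", "latency", 60);

      public void recordRpc(long micros) {
        rpcTime.add(micros);   // feed one observation into the estimator
      }
    }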

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1358480-1369130

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Fri Aug  3 19:00:15 2012
@@ -295,4 +295,13 @@
       <Class name="~org\.apache\.hadoop\.ha\.proto\.ZKFCProtocolProtos.*"/>
     </Match>
 
+    <!--
+       Manually checked: FindBugs misses that the child thread manually syncs on the parent's intrinsic lock.
+    -->
+     <Match>
+       <Class name="org.apache.hadoop.metrics2.lib.MutableQuantiles" />
+       <Field name="previousSnapshot" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
+
  </FindBugsFilter>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml Fri Aug  3 19:00:15 2012
@@ -31,9 +31,6 @@
   <packaging>jar</packaging>
 
   <properties>
-    <snappy.prefix>/usr/local</snappy.prefix>
-    <snappy.lib>${snappy.prefix}/lib</snappy.lib>
-    <bundle.snappy>false</bundle.snappy>
     <kdc.resource.dir>src/test/resources/kdc</kdc.resource.dir>
     <hadoop.component>common</hadoop.component>
     <is.hadoop.component>true</is.hadoop.component>
@@ -44,7 +41,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
-      <scope>provided</scope>
+      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>com.google.guava</groupId>
@@ -533,10 +530,10 @@
         <activeByDefault>false</activeByDefault>
       </activation>
       <properties>
-        <snappy.prefix>/usr/local</snappy.prefix>
-        <snappy.lib>${snappy.prefix}/lib</snappy.lib>
-        <snappy.include>${snappy.prefix}/include</snappy.include>
-        <runas.home></runas.home>
+        <snappy.prefix></snappy.prefix>
+        <snappy.lib></snappy.lib>
+        <snappy.include></snappy.include>
+        <require.snappy>false</require.snappy>
       </properties>
       <build>
         <plugins>
@@ -579,9 +576,7 @@
                 <configuration>
                   <target>
                     <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
-                      <env key="CFLAGS" value="-I${snappy.include}"/>
-                      <env key="LDFLAGS" value="-L${snappy.lib}"/>
+                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include}"/>
                     </exec>
                     <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
                       <arg line="VERBOSE=1"/>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt Fri Aug  3 19:00:15 2012
@@ -79,17 +79,26 @@ INCLUDE(CheckCSourceCompiles)
 CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
 CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
 
-find_library(SNAPPY_LIBRARY NAMES snappy PATHS)
-find_path(SNAPPY_INCLUDE_DIR NAMES snappy.h PATHS)
-if (SNAPPY_LIBRARY)
+find_library(SNAPPY_LIBRARY 
+    NAMES snappy
+    PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/lib
+          ${CUSTOM_SNAPPY_PREFIX}/lib64 ${CUSTOM_SNAPPY_LIB})
+find_path(SNAPPY_INCLUDE_DIR 
+    NAMES snappy.h
+    PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/include
+          ${CUSTOM_SNAPPY_INCLUDE})
+if (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
     GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
     set(SNAPPY_SOURCE_FILES
         "${D}/io/compress/snappy/SnappyCompressor.c"
         "${D}/io/compress/snappy/SnappyDecompressor.c")
-else (${SNAPPY_LIBRARY})
+else (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
     set(SNAPPY_INCLUDE_DIR "")
     set(SNAPPY_SOURCE_FILES "")
-endif (SNAPPY_LIBRARY)
+    IF(REQUIRE_SNAPPY)
+        MESSAGE(FATAL_ERROR "Required snappy library could not be found.  SNAPPY_LIBRARY=${SNAPPY_LIBRARY}, SNAPPY_INCLUDE_DIR=${SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_INCLUDE_DIR=${CUSTOM_SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_PREFIX=${CUSTOM_SNAPPY_PREFIX}, CUSTOM_SNAPPY_INCLUDE=${CUSTOM_SNAPPY_INCLUDE}")
+    ENDIF(REQUIRE_SNAPPY)
+endif (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
 
 include_directories(
     ${GENERATED_JAVAH}
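
Taken together, the pom.xml and src/CMakeLists.txt changes above
(HADOOP-8620) replace the old CFLAGS/LDFLAGS plumbing with explicit cache
variables: snappy is now located via find_library/find_path, and the build
aborts only when REQUIRE_SNAPPY is set. With the native profile enabled, a
build that must include the snappy codecs would be invoked along the lines
of "mvn compile -Pnative -Drequire.snappy", optionally pointing at a
non-standard install with -Dsnappy.prefix, -Dsnappy.lib and
-Dsnappy.include (the exact mvn invocation is an illustration, not taken
from this patch).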

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop Fri Aug  3 19:00:15 2012
@@ -96,33 +96,30 @@ case $COMMAND in
     # the core commands
     if [ "$COMMAND" = "fs" ] ; then
       CLASS=org.apache.hadoop.fs.FsShell
-      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
     elif [ "$COMMAND" = "version" ] ; then
       CLASS=org.apache.hadoop.util.VersionInfo
-      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
     elif [ "$COMMAND" = "jar" ] ; then
       CLASS=org.apache.hadoop.util.RunJar
     elif [ "$COMMAND" = "distcp" ] ; then
       CLASS=org.apache.hadoop.tools.DistCp
       CLASSPATH=${CLASSPATH}:${TOOL_PATH}
-      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
     elif [ "$COMMAND" = "daemonlog" ] ; then
       CLASS=org.apache.hadoop.log.LogLevel
-      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
     elif [ "$COMMAND" = "archive" ] ; then
       CLASS=org.apache.hadoop.tools.HadoopArchives
       CLASSPATH=${CLASSPATH}:${TOOL_PATH}
-      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
     elif [[ "$COMMAND" = -*  ]] ; then
         # class and package names cannot begin with a -
         echo "Error: No command named \`$COMMAND' was found. Perhaps you meant \`hadoop ${COMMAND#-}'"
         exit 1
     else
-      HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
       CLASS=$COMMAND
     fi
     shift
     
+    # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
+    HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+
     #make sure security appender is turned off
     HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties Fri Aug  3 19:00:15 2012
@@ -106,7 +106,7 @@ hadoop.security.logger=INFO,NullAppender
 hadoop.security.log.maxfilesize=256MB
 hadoop.security.log.maxbackupindex=20
 log4j.category.SecurityLogger=${hadoop.security.logger}
-hadoop.security.log.file=SecurityAuth.audit
+hadoop.security.log.file=SecurityAuth-${user.name}.audit
 log4j.appender.RFAS=org.apache.log4j.RollingFileAppender 
 log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
 log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-client.xml.example
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-client.xml.example?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-client.xml.example (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-client.xml.example Fri Aug  3 19:00:15 2012
@@ -1,6 +1,21 @@
 <?xml version="1.0"?>
 <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
 <configuration>
 
 <property>
@@ -21,7 +36,15 @@
 <property>
   <name>ssl.client.truststore.type</name>
   <value>jks</value>
-  <description>Optional. Default value is "jks".
+  <description>Optional. The keystore file format, default value is "jks".
+  </description>
+</property>
+
+<property>
+  <name>ssl.client.truststore.reload.interval</name>
+  <value>10000</value>
+  <description>Truststore reload check interval, in milliseconds.
+  Default value is 10000 (10 seconds).
   </description>
 </property>
 
@@ -50,7 +73,7 @@
 <property>
   <name>ssl.client.keystore.type</name>
   <value>jks</value>
-  <description>Optional. Default value is "jks".
+  <description>Optional. The keystore file format, default value is "jks".
   </description>
 </property>
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-server.xml.example
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-server.xml.example?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-server.xml.example (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/conf/ssl-server.xml.example Fri Aug  3 19:00:15 2012
@@ -1,6 +1,21 @@
 <?xml version="1.0"?>
 <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
 <configuration>
 
 <property>
@@ -20,11 +35,19 @@
 <property>
   <name>ssl.server.truststore.type</name>
   <value>jks</value>
-  <description>Optional. Default value is "jks".
+  <description>Optional. The keystore file format, default value is "jks".
   </description>
 </property>
 
 <property>
+  <name>ssl.server.truststore.reload.interval</name>
+  <value>10000</value>
+  <description>Truststore reload check interval, in milliseconds.
+  Default value is 10000 (10 seconds).
+  </description>
+</property>
+
+<property>
   <name>ssl.server.keystore.location</name>
   <value></value>
   <description>Keystore to be used by NN and DN. Must be specified.
@@ -48,7 +70,7 @@
 <property>
   <name>ssl.server.keystore.type</name>
   <value>jks</value>
-  <description>Optional. Default value is "jks".
+  <description>Optional. The keystore file format, default value is "jks".
   </description>
 </property>
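
Both example files feed the new org.apache.hadoop.security.ssl.SSLFactory
added by this commit, and the ssl.*.truststore.reload.interval keys control
how often the new ReloadingX509TrustManager re-checks the truststore file on
disk. A minimal client-side sketch, assuming the SSLFactory API as added
here (the host, port, and use of HttpsURLConnection are illustrative):

    import java.net.URL;
    import javax.net.ssl.HttpsURLConnection;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.ssl.SSLFactory;

    public class SecureGet {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        SSLFactory factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
        factory.init();  // loads the ssl-client.xml keystores, starts reloading
        try {
          HttpsURLConnection conn = (HttpsURLConnection)
              new URL("https://namenode.example.com:50470/").openConnection();
          conn.setSSLSocketFactory(factory.createSSLSocketFactory());
          conn.setHostnameVerifier(factory.getHostnameVerifier());
          System.out.println("HTTP " + conn.getResponseCode());
        } finally {
          factory.destroy();  // stops the truststore reload thread
        }
      }
    }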
 

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1358480-1369130

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml Fri Aug  3 19:00:15 2012
@@ -110,7 +110,7 @@
 
       <p><code>hadoop.http.authentication.kerberos.principal</code>: Indicates the Kerberos 
       principal to be used for HTTP endpoint when using 'kerberos' authentication.
-      The principal short name must be <code>HTTP</code> per Kerberos HTTP SPENGO specification.
+      The principal short name must be <code>HTTP</code> per Kerberos HTTP SPNEGO specification.
       The default value is <code>HTTP/_HOST@$LOCALHOST</code>, where <code>_HOST</code> -if present-
       is replaced with bind address of the HTTP server.
       </p>

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1358480-1369130

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Fri Aug  3 19:00:15 2012
@@ -40,9 +40,11 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
 import java.util.StringTokenizer;
@@ -75,7 +77,6 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.JsonGenerator;
-import org.w3c.dom.Comment;
 import org.w3c.dom.DOMException;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
@@ -83,6 +84,7 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.w3c.dom.Text;
 import org.xml.sax.SAXException;
+import com.google.common.base.Preconditions;
 
 /** 
  * Provides access to configuration parameters.
@@ -158,17 +160,45 @@ public class Configuration implements It
 
   private boolean quietmode = true;
   
+  private static class Resource {
+    private final Object resource;
+    private final String name;
+    
+    public Resource(Object resource) {
+      this(resource, resource.toString());
+    }
+    
+    public Resource(Object resource, String name) {
+      this.resource = resource;
+      this.name = name;
+    }
+    
+    public String getName(){
+      return name;
+    }
+    
+    public Object getResource() {
+      return resource;
+    }
+    
+    @Override
+    public String toString() {
+      return name;
+    }
+  }
+  
   /**
    * List of configuration resources.
    */
-  private ArrayList<Object> resources = new ArrayList<Object>();
-
+  private ArrayList<Resource> resources = new ArrayList<Resource>();
+  
   /**
    * The value reported as the setting resource when a key is set
-   * by code rather than a file resource.
+   * by code rather than a file resource, as reported by dumpConfiguration.
    */
   static final String UNKNOWN_RESOURCE = "Unknown";
 
+
   /**
    * List of configuration parameters marked <b>final</b>. 
    */
@@ -202,7 +232,7 @@ public class Configuration implements It
    * Stores the mapping of key to the resource which modifies or loads 
    * the key most recently
    */
-  private HashMap<String, String> updatingResource;
+  private HashMap<String, String[]> updatingResource;
  
   /**
    * Class to keep the information about the keys which replace the deprecated
@@ -369,7 +399,7 @@ public class Configuration implements It
    * @return alternate name.
    */
   private String[] getAlternateNames(String name) {
-    String oldName, altNames[] = null;
+    String altNames[] = null;
     DeprecatedKeyInfo keyInfo = deprecatedKeyMap.get(name);
     if (keyInfo == null) {
       altNames = (reverseDeprecatedKeyMap.get(name) != null ) ? 
@@ -485,7 +515,7 @@ public class Configuration implements It
    */
   public Configuration(boolean loadDefaults) {
     this.loadDefaults = loadDefaults;
-    updatingResource = new HashMap<String, String>();
+    updatingResource = new HashMap<String, String[]>();
     synchronized(Configuration.class) {
       REGISTRY.put(this, null);
     }
@@ -498,7 +528,7 @@ public class Configuration implements It
    */
   @SuppressWarnings("unchecked")
   public Configuration(Configuration other) {
-   this.resources = (ArrayList)other.resources.clone();
+   this.resources = (ArrayList<Resource>) other.resources.clone();
    synchronized(other) {
      if (other.properties != null) {
        this.properties = (Properties)other.properties.clone();
@@ -508,7 +538,7 @@ public class Configuration implements It
        this.overlay = (Properties)other.overlay.clone();
      }
 
-     this.updatingResource = new HashMap<String, String>(other.updatingResource);
+     this.updatingResource = new HashMap<String, String[]>(other.updatingResource);
    }
    
     this.finalParameters = new HashSet<String>(other.finalParameters);
@@ -546,7 +576,7 @@ public class Configuration implements It
    *             with that name.
    */
   public void addResource(String name) {
-    addResourceObject(name);
+    addResourceObject(new Resource(name));
   }
 
   /**
@@ -560,7 +590,7 @@ public class Configuration implements It
    *            the classpath.
    */
   public void addResource(URL url) {
-    addResourceObject(url);
+    addResourceObject(new Resource(url));
   }
 
   /**
@@ -574,7 +604,7 @@ public class Configuration implements It
    *             the classpath.
    */
   public void addResource(Path file) {
-    addResourceObject(file);
+    addResourceObject(new Resource(file));
   }
 
   /**
@@ -583,10 +613,29 @@ public class Configuration implements It
    * The properties of this resource will override properties of previously 
    * added resources, unless they were marked <a href="#Final">final</a>. 
    * 
-   * @param in InputStream to deserialize the object from. 
+   * WARNING: The contents of the InputStream will be cached by this method,
+   * so use it sparingly because it increases memory consumption.
+   * 
+   * @param in InputStream to deserialize the object from. The stream will be
+   * read when a get or set is next called, and it will be closed after it
+   * has been read.
    */
   public void addResource(InputStream in) {
-    addResourceObject(in);
+    addResourceObject(new Resource(in));
+  }
+
+  /**
+   * Add a configuration resource. 
+   * 
+   * The properties of this resource will override properties of previously 
+   * added resources, unless they were marked <a href="#Final">final</a>. 
+   * 
+   * @param in InputStream to deserialize the object from.
+   * @param name a name for the resource, since InputStream.toString() is
+   * sometimes not very descriptive.
+   */
+  public void addResource(InputStream in, String name) {
+    addResourceObject(new Resource(in, name));
   }
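
A quick sketch of the new overload (the resource path and key are
illustrative): naming the stream keeps error messages and
getPropertySources() output readable instead of surfacing an opaque
InputStream.toString().

    import java.io.InputStream;
    import org.apache.hadoop.conf.Configuration;

    public class LoadNamedStream {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        InputStream in = LoadNamedStream.class.getResourceAsStream("/my-site.xml");
        conf.addResource(in, "my-site.xml");    // stream is parsed, then closed, lazily
        System.out.println(conf.get("my.key")); // first get() triggers the parse
      }
    }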
   
   
@@ -603,7 +652,7 @@ public class Configuration implements It
     finalParameters.clear();                      // clear site-limits
   }
   
-  private synchronized void addResourceObject(Object resource) {
+  private synchronized void addResourceObject(Resource resource) {
     resources.add(resource);                      // add to resources
     reloadConfiguration();
   }
@@ -721,17 +770,46 @@ public class Configuration implements It
    * @param value property value.
    */
   public void set(String name, String value) {
+    set(name, value, null);
+  }
+  
+  /** 
+   * Set the <code>value</code> of the <code>name</code> property. If 
+   * <code>name</code> is deprecated or there is a deprecated name associated with it,
+   * it sets the value to both names.
+   * 
+   * @param name property name.
+   * @param value property value.
+   * @param source the place that this configuration value came from 
+   * (For debugging).
+   * @throws IllegalArgumentException when the value or name is null.
+   */
+  public void set(String name, String value, String source) {
+    Preconditions.checkArgument(
+        name != null,
+        "Property name must not be null");
+    Preconditions.checkArgument(
+        value != null,
+        "Property value must not be null");
     if (deprecatedKeyMap.isEmpty()) {
       getProps();
     }
     getOverlay().setProperty(name, value);
     getProps().setProperty(name, value);
-    updatingResource.put(name, UNKNOWN_RESOURCE);
+    if(source == null) {
+      updatingResource.put(name, new String[] {"programmatically"});
+    } else {
+      updatingResource.put(name, new String[] {source});
+    }
     String[] altNames = getAlternateNames(name);
     if (altNames != null && altNames.length > 0) {
+      String altSource = "because " + name + " is deprecated";
       for(String altName : altNames) {
-    	getOverlay().setProperty(altName, value);
-        getProps().setProperty(altName, value);
+        if(!altName.equals(name)) {
+          getOverlay().setProperty(altName, value);
+          getProps().setProperty(altName, value);
+          updatingResource.put(altName, new String[] {altSource});
+        }
       }
     }
     warnOnceIfDeprecated(name);
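
A sketch of the new three-argument set() (the key and source strings are
made up): the extra argument is purely diagnostic, and is what
getPropertySources(), further down in this patch, reports back.

    import org.apache.hadoop.conf.Configuration;

    public class TaggedSet {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // The third argument records where the value came from, for debugging.
        conf.set("my.buffer.size", "4096", "command line");
        // The two-argument form records the source as "programmatically".
        conf.set("my.other.key", "x");
      }
    }
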
@@ -821,6 +899,25 @@ public class Configuration implements It
     }
     return Integer.parseInt(valueString);
   }
+  
+  /**
+   * Get the value of the <code>name</code> property as a set of comma-delimited
+   * <code>int</code> values.
+   * 
+   * If no such property exists, an empty array is returned.
+   * 
+   * @param name property name
+   * @return property value interpreted as an array of comma-delimited
+   *         <code>int</code> values
+   */
+  public int[] getInts(String name) {
+    String[] strings = getTrimmedStrings(name);
+    int[] ints = new int[strings.length];
+    for (int i = 0; i < strings.length; i++) {
+      ints[i] = Integer.parseInt(strings[i]);
+    }
+    return ints;
+  }
 
   /** 
    * Set the value of the <code>name</code> property to an <code>int</code>.
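
The getInts() accessor added just above is a thin wrapper over
getTrimmedStrings(); a usage sketch (the key name is made up):

    import org.apache.hadoop.conf.Configuration;

    public class IntsDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("my.retry.backoff.ms", "100, 200,400");  // whitespace is trimmed
        for (int ms : conf.getInts("my.retry.backoff.ms")) {
          System.out.println(ms);  // prints 100, 200, 400; empty array if unset
        }
      }
    }
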
@@ -1071,17 +1168,22 @@ public class Configuration implements It
   }
 
   /**
-   * Gets the absolute path to the resource object (file, URL, etc.), for a given
-   * property name.
+   * Gets information about why a property was set.  Typically this is the 
+   * path to the resource objects (file, URL, etc.) the property came from, but
+   * it can also indicate that it was set programmatically, or because of the
+   * command line.
    *
    * @param name - The property name to get the source of.
-   * @return null - If the property or its source wasn't found or if the property
-   * was defined in code (i.e. in a Configuration instance, not from a physical
-   * resource). Otherwise, returns the absolute path of the resource that loaded
-   * the property name, as a String.
+   * @return null - If the property or its source wasn't found. Otherwise, 
+   * returns a list of the sources of the resource.  The older sources are
+   * the first ones in the list.  For example, if a configuration is set from
+   * the command line and then written out to a file that is read back in, the
+   * first entry would indicate that it was set from the command line, while
+   * the second would indicate the file from which the new configuration was
+   * read.
    */
   @InterfaceStability.Unstable
-  public synchronized String getPropertySource(String name) {
+  public synchronized String[] getPropertySources(String name) {
     if (properties == null) {
       // If properties is null, it means a resource was newly added
       // but the props were cleared so as to load it upon future
@@ -1093,11 +1195,11 @@ public class Configuration implements It
     if (properties == null || updatingResource == null) {
       return null;
     } else {
-      String source = updatingResource.get(name);
-      if (source == null || source.equals(UNKNOWN_RESOURCE)) {
+      String[] source = updatingResource.get(name);
+      if(source == null) {
         return null;
       } else {
-        return source;
+        return Arrays.copyOf(source, source.length);
       }
     }
   }
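
Continuing the set() sketch from above, a caller can now recover the whole
provenance chain (the key and source strings remain illustrative):

    import org.apache.hadoop.conf.Configuration;

    public class ShowSources {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("my.buffer.size", "4096", "command line");
        String[] sources = conf.getPropertySources("my.buffer.size");
        if (sources != null) {
          for (String s : sources) {   // oldest source first
            System.out.println("my.buffer.size came from: " + s);
          }
        }
      }
    }
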
@@ -1702,11 +1804,14 @@ public class Configuration implements It
   protected synchronized Properties getProps() {
     if (properties == null) {
       properties = new Properties();
+      HashMap<String, String[]> backup = 
+        new HashMap<String, String[]>(updatingResource);
       loadResources(properties, resources, quietmode);
       if (overlay!= null) {
         properties.putAll(overlay);
         for (Map.Entry<Object,Object> item: overlay.entrySet()) {
-          updatingResource.put((String) item.getKey(), UNKNOWN_RESOURCE);
+          String key = (String)item.getKey();
+          updatingResource.put(key, backup.get(key));
         }
       }
     }
@@ -1752,26 +1857,33 @@ public class Configuration implements It
   }
 
   private void loadResources(Properties properties,
-                             ArrayList resources,
+                             ArrayList<Resource> resources,
                              boolean quiet) {
     if(loadDefaults) {
       for (String resource : defaultResources) {
-        loadResource(properties, resource, quiet);
+        loadResource(properties, new Resource(resource), quiet);
       }
     
       //support the hadoop-site.xml as a deprecated case
       if(getResource("hadoop-site.xml")!=null) {
-        loadResource(properties, "hadoop-site.xml", quiet);
+        loadResource(properties, new Resource("hadoop-site.xml"), quiet);
       }
     }
     
-    for (Object resource : resources) {
-      loadResource(properties, resource, quiet);
+    for (int i = 0; i < resources.size(); i++) {
+      Resource ret = loadResource(properties, resources.get(i), quiet);
+      if (ret != null) {
+        resources.set(i, ret);
+      }
     }
   }
   
-  private void loadResource(Properties properties, Object name, boolean quiet) {
+  private Resource loadResource(Properties properties, Resource wrapper, boolean quiet) {
+    String name = UNKNOWN_RESOURCE;
     try {
+      Object resource = wrapper.getResource();
+      name = wrapper.getName();
+      
       DocumentBuilderFactory docBuilderFactory 
         = DocumentBuilderFactory.newInstance();
       //ignore all comments inside the xml file
@@ -1790,27 +1902,28 @@ public class Configuration implements It
       DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
       Document doc = null;
       Element root = null;
-
-      if (name instanceof URL) {                  // an URL resource
-        URL url = (URL)name;
+      boolean returnCachedProperties = false;
+      
+      if (resource instanceof URL) {                  // an URL resource
+        URL url = (URL)resource;
         if (url != null) {
           if (!quiet) {
             LOG.info("parsing " + url);
           }
           doc = builder.parse(url.toString());
         }
-      } else if (name instanceof String) {        // a CLASSPATH resource
-        URL url = getResource((String)name);
+      } else if (resource instanceof String) {        // a CLASSPATH resource
+        URL url = getResource((String)resource);
         if (url != null) {
           if (!quiet) {
             LOG.info("parsing " + url);
           }
           doc = builder.parse(url.toString());
         }
-      } else if (name instanceof Path) {          // a file resource
+      } else if (resource instanceof Path) {          // a file resource
         // Can't use FileSystem API or we get an infinite loop
         // since FileSystem uses Configuration API.  Use java.io.File instead.
-        File file = new File(((Path)name).toUri().getPath())
+        File file = new File(((Path)resource).toUri().getPath())
           .getAbsoluteFile();
         if (file.exists()) {
           if (!quiet) {
@@ -1823,25 +1936,32 @@ public class Configuration implements It
             in.close();
           }
         }
-      } else if (name instanceof InputStream) {
+      } else if (resource instanceof InputStream) {
         try {
-          doc = builder.parse((InputStream)name);
+          doc = builder.parse((InputStream)resource);
+          returnCachedProperties = true;
         } finally {
-          ((InputStream)name).close();
+          ((InputStream)resource).close();
         }
-      } else if (name instanceof Element) {
-        root = (Element)name;
+      } else if (resource instanceof Properties) {
+        overlay(properties, (Properties)resource);
+      } else if (resource instanceof Element) {
+        root = (Element)resource;
       }
 
       if (doc == null && root == null) {
         if (quiet)
-          return;
-        throw new RuntimeException(name + " not found");
+          return null;
+        throw new RuntimeException(resource + " not found");
       }
 
       if (root == null) {
         root = doc.getDocumentElement();
       }
+      Properties toAddTo = properties;
+      if (returnCachedProperties) {
+        toAddTo = new Properties();
+      }
       if (!"configuration".equals(root.getTagName()))
         LOG.fatal("bad conf file: top-level element not <configuration>");
       NodeList props = root.getChildNodes();
@@ -1851,7 +1971,7 @@ public class Configuration implements It
           continue;
         Element prop = (Element)propNode;
         if ("configuration".equals(prop.getTagName())) {
-          loadResource(properties, prop, quiet);
+          loadResource(toAddTo, new Resource(prop, name), quiet);
           continue;
         }
         if (!"property".equals(prop.getTagName()))
@@ -1860,6 +1980,7 @@ public class Configuration implements It
         String attr = null;
         String value = null;
         boolean finalParameter = false;
+        LinkedList<String> source = new LinkedList<String>();
         for (int j = 0; j < fields.getLength(); j++) {
           Node fieldNode = fields.item(j);
           if (!(fieldNode instanceof Element))
@@ -1871,7 +1992,10 @@ public class Configuration implements It
             value = ((Text)field.getFirstChild()).getData();
           if ("final".equals(field.getTagName()) && field.hasChildNodes())
             finalParameter = "true".equals(((Text)field.getFirstChild()).getData());
+          if ("source".equals(field.getTagName()) && field.hasChildNodes())
+            source.add(((Text)field.getFirstChild()).getData());
         }
+        source.add(name);
         
         // Ignore this parameter if it has already been marked as 'final'
         if (attr != null) {
@@ -1880,36 +2004,49 @@ public class Configuration implements It
             keyInfo.accessed = false;
             for (String key:keyInfo.newKeys) {
               // update new keys with deprecated key's value 
-              loadProperty(properties, name, key, value, finalParameter);
+              loadProperty(toAddTo, name, key, value, finalParameter, 
+                  source.toArray(new String[source.size()]));
             }
           }
           else {
-            loadProperty(properties, name, attr, value, finalParameter);
+            loadProperty(toAddTo, name, attr, value, finalParameter, 
+                source.toArray(new String[source.size()]));
           }
         }
       }
-        
+      
+      if (returnCachedProperties) {
+        overlay(properties, toAddTo);
+        return new Resource(toAddTo, name);
+      }
+      return null;
     } catch (IOException e) {
-      LOG.fatal("error parsing conf file: " + e);
+      LOG.fatal("error parsing conf " + name, e);
       throw new RuntimeException(e);
     } catch (DOMException e) {
-      LOG.fatal("error parsing conf file: " + e);
+      LOG.fatal("error parsing conf " + name, e);
       throw new RuntimeException(e);
     } catch (SAXException e) {
-      LOG.fatal("error parsing conf file: " + e);
+      LOG.fatal("error parsing conf " + name, e);
       throw new RuntimeException(e);
     } catch (ParserConfigurationException e) {
-      LOG.fatal("error parsing conf file: " + e);
+      LOG.fatal("error parsing conf " + name, e);
       throw new RuntimeException(e);
     }
   }
 
-  private void loadProperty(Properties properties, Object name, String attr,
-      String value, boolean finalParameter) {
+  private void overlay(Properties to, Properties from) {
+    for (Entry<Object, Object> entry: from.entrySet()) {
+      to.put(entry.getKey(), entry.getValue());
+    }
+  }
+  
+  private void loadProperty(Properties properties, String name, String attr,
+      String value, boolean finalParameter, String[] source) {
     if (value != null) {
       if (!finalParameters.contains(attr)) {
         properties.setProperty(attr, value);
-        updatingResource.put(attr, name.toString());
+        updatingResource.put(attr, source);
       } else if (!value.equals(properties.getProperty(attr))) {
         LOG.warn(name+":an attempt to override final parameter: "+attr
             +";  Ignoring.");
@@ -1981,11 +2118,6 @@ public class Configuration implements It
       Element propNode = doc.createElement("property");
       conf.appendChild(propNode);
 
-      if (updatingResource != null) {
-        Comment commentNode = doc.createComment(
-          "Loaded from " + updatingResource.get(name));
-        propNode.appendChild(commentNode);
-      }
       Element nameNode = doc.createElement("name");
       nameNode.appendChild(doc.createTextNode(name));
       propNode.appendChild(nameNode);
@@ -1994,6 +2126,17 @@ public class Configuration implements It
       valueNode.appendChild(doc.createTextNode(value));
       propNode.appendChild(valueNode);
 
+      if (updatingResource != null) {
+        String[] sources = updatingResource.get(name);
+        if (sources != null) {
+          for(String s : sources) {
+            Element sourceNode = doc.createElement("source");
+            sourceNode.appendChild(doc.createTextNode(s));
+            propNode.appendChild(sourceNode);
+          }
+        }
+      }
+      
       conf.appendChild(doc.createTextNode("\n"));
     }
     return doc;
@@ -2026,8 +2169,12 @@ public class Configuration implements It
                                        config.get((String) item.getKey()));
         dumpGenerator.writeBooleanField("isFinal",
                                         config.finalParameters.contains(item.getKey()));
-        dumpGenerator.writeStringField("resource",
-                                       config.updatingResource.get(item.getKey()));
+        String[] resources = config.updatingResource.get(item.getKey());
+        String resource = UNKNOWN_RESOURCE;
+        if (resources != null && resources.length > 0) {
+          resource = resources[0];
+        }
+        dumpGenerator.writeStringField("resource", resource);
         dumpGenerator.writeEndObject();
       }
     }
@@ -2067,7 +2214,7 @@ public class Configuration implements It
     toString(resources, sb);
     return sb.toString();
   }
-
+  
   private <T> void toString(List<T> resources, StringBuilder sb) {
     ListIterator<T> i = resources.listIterator();
     while (i.hasNext()) {
@@ -2104,8 +2251,11 @@ public class Configuration implements It
     clear();
     int size = WritableUtils.readVInt(in);
     for(int i=0; i < size; ++i) {
-      set(org.apache.hadoop.io.Text.readString(in), 
-          org.apache.hadoop.io.Text.readString(in));
+      String key = org.apache.hadoop.io.Text.readString(in);
+      String value = org.apache.hadoop.io.Text.readString(in);
+      set(key, value); 
+      String sources[] = WritableUtils.readCompressedStringArray(in);
+      updatingResource.put(key, sources);
     }
   }
 
@@ -2116,6 +2266,8 @@ public class Configuration implements It
     for(Map.Entry<Object, Object> item: props.entrySet()) {
       org.apache.hadoop.io.Text.writeString(out, (String) item.getKey());
       org.apache.hadoop.io.Text.writeString(out, (String) item.getValue());
+      WritableUtils.writeCompressedStringArray(out, 
+          updatingResource.get(item.getKey()));
     }
   }
   

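Taken together, the Configuration hunks above replace the single-valued
getPropertySource lookup with per-property source lists: each property now
remembers the ordered chain of resources that set it, the lists are emitted
as <source> elements when the configuration is written out, and
readFields/write round-trip them. A minimal sketch of the two new public
APIs, getPropertySources and getInts; the property name and values here are
illustrative, not part of the patch:

    import org.apache.hadoop.conf.Configuration;

    Configuration conf = new Configuration();
    conf.set("example.weights", "1, 2, 3");     // illustrative key

    // Comma-delimited values parsed as an int array (empty if unset).
    int[] weights = conf.getInts("example.weights");   // {1, 2, 3}

    // All sources that set the property, oldest first; null if unknown.
    String[] sources = conf.getPropertySources("example.weights");
    if (sources != null) {
      for (String source : sources) {
        System.out.println("example.weights set by: " + source);
      }
    }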
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Fri Aug  3 19:00:15 2012
@@ -166,5 +166,12 @@ public class CommonConfigurationKeys ext
     "hadoop.http.staticuser.user";
   public static final String DEFAULT_HADOOP_HTTP_STATIC_USER =
     "dr.who";
+
+  /* Path to the Kerberos ticket cache.  Setting this will force
+   * UserGroupInformation to use only this ticket cache file when creating a
+   * FileSystem instance.
+   */
+  public static final String KERBEROS_TICKET_CACHE_PATH =
+      "hadoop.security.kerberos.ticket.cache.path";
 }
 

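The new key gives callers a way to pin UserGroupInformation to a specific
Kerberos ticket cache rather than the process default. A hedged sketch of
the intended use; the cache path and NameNode URI are made up:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.fs.FileSystem;

    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.KERBEROS_TICKET_CACHE_PATH,
        "/tmp/krb5cc_1000");                    // illustrative cache path
    // The FileSystem.get(uri, conf, user) overload below now resolves its
    // UGI via UserGroupInformation.getBestUGI(ticketCachePath, user).
    FileSystem fs = FileSystem.get(
        URI.create("hdfs://namenode:8020/"), conf, null);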
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java Fri Aug  3 19:00:15 2012
@@ -27,6 +27,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.Time;
 
 /**
  * A daemon thread that waits for the next file system to renew.
@@ -62,7 +63,7 @@ public class DelegationTokenRenewer<T ex
     /** Get the delay until this event should happen. */
     @Override
     public long getDelay(final TimeUnit unit) {
-      final long millisLeft = renewalTime - System.currentTimeMillis();
+      final long millisLeft = renewalTime - Time.now();
       return unit.convert(millisLeft, TimeUnit.MILLISECONDS);
     }
 
@@ -92,7 +93,7 @@ public class DelegationTokenRenewer<T ex
      * @param newTime the new time
      */
     private void updateRenewalTime() {
-      renewalTime = RENEW_CYCLE + System.currentTimeMillis();
+      renewalTime = RENEW_CYCLE + Time.now();
     }
 
     /**

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Fri Aug  3 19:00:15 2012
@@ -137,12 +137,10 @@ public abstract class FileSystem extends
    */
   public static FileSystem get(final URI uri, final Configuration conf,
         final String user) throws IOException, InterruptedException {
-    UserGroupInformation ugi;
-    if (user == null) {
-      ugi = UserGroupInformation.getCurrentUser();
-    } else {
-      ugi = UserGroupInformation.createRemoteUser(user);
-    }
+    String ticketCachePath =
+      conf.get(CommonConfigurationKeys.KERBEROS_TICKET_CACHE_PATH);
+    UserGroupInformation ugi =
+        UserGroupInformation.getBestUGI(ticketCachePath, user);
     return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
       public FileSystem run() throws IOException {
         return get(uri, conf);
@@ -282,11 +280,11 @@ public abstract class FileSystem extends
     String scheme = uri.getScheme();
     String authority = uri.getAuthority();
 
-    if (scheme == null) {                       // no scheme: use default FS
+    if (scheme == null && authority == null) {     // use default FS
       return get(conf);
     }
 
-    if (authority == null) {                       // no authority
+    if (scheme != null && authority == null) {     // no authority
       URI defaultUri = getDefaultUri(conf);
       if (scheme.equals(defaultUri.getScheme())    // if scheme matches default
           && defaultUri.getAuthority() != null) {  // & default has authority
@@ -314,12 +312,10 @@ public abstract class FileSystem extends
    */
   public static FileSystem newInstance(final URI uri, final Configuration conf,
       final String user) throws IOException, InterruptedException {
-    UserGroupInformation ugi;
-    if (user == null) {
-      ugi = UserGroupInformation.getCurrentUser();
-    } else {
-      ugi = UserGroupInformation.createRemoteUser(user);
-    }
+    String ticketCachePath =
+      conf.get(CommonConfigurationKeys.KERBEROS_TICKET_CACHE_PATH);
+    UserGroupInformation ugi =
+        UserGroupInformation.getBestUGI(ticketCachePath, user);
     return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
       public FileSystem run() throws IOException {
         return newInstance(uri,conf); 
@@ -576,7 +572,7 @@ public abstract class FileSystem extends
       throw new IllegalArgumentException("Invalid start or len parameter");
     }
 
-    if (file.getLen() < start) {
+    if (file.getLen() <= start) {
       return new BlockLocation[0];
 
     }
@@ -1218,6 +1214,16 @@ public abstract class FileSystem extends
     }
     return true;
   }
+  
+  /**
+   * Cancel the scheduled deletion of the path when the FileSystem is closed.
+   * @param f the path whose scheduled deletion is to be cancelled
+   */
+  public boolean cancelDeleteOnExit(Path f) {
+    synchronized (deleteOnExit) {
+      return deleteOnExit.remove(f);
+    }
+  }
 
   /**
    * Delete all files that were marked as delete-on-exit. This recursively
@@ -1228,7 +1234,9 @@ public abstract class FileSystem extends
       for (Iterator<Path> iter = deleteOnExit.iterator(); iter.hasNext();) {
         Path path = iter.next();
         try {
-          delete(path, true);
+          if (exists(path)) {
+            delete(path, true);
+          }
         }
         catch (IOException e) {
           LOG.info("Ignoring failure to deleteOnExit for path " + path);

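Two behavioral fixes above are easy to miss: getFileBlockLocations now
returns an empty array when start equals the file length, and
get(URI, conf) falls back to the default filesystem only when both scheme
and authority are absent, so a URI carrying an authority but no scheme no
longer silently maps to the default FS. Roughly, assuming fs.defaultFS is
hdfs://nn:8020 (illustrative):

    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://nn:8020");       // illustrative default

    // No scheme, no authority: still resolves to the default FS.
    FileSystem a = FileSystem.get(URI.create("/tmp/a"), conf);

    // Scheme matches the default, authority absent: borrows nn:8020.
    FileSystem b = FileSystem.get(URI.create("hdfs:///tmp/a"), conf);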
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java Fri Aug  3 19:00:15 2012
@@ -191,23 +191,6 @@ public class FilterFileSystem extends Fi
     return fs.delete(f, recursive);
   }
   
-  /**
-   * Mark a path to be deleted when FileSystem is closed.
-   * When the JVM shuts down,
-   * all FileSystem objects will be closed automatically.
-   * Then,
-   * the marked path will be deleted as a result of closing the FileSystem.
-   *
-   * The path has to exist in the file system.
-   * 
-   * @param f the path to delete.
-   * @return  true if deleteOnExit is successful, otherwise false.
-   * @throws IOException
-   */
-  public boolean deleteOnExit(Path f) throws IOException {
-    return fs.deleteOnExit(f);
-  }    
-
   /** List files in a directory. */
   public FileStatus[] listStatus(Path f) throws IOException {
     return fs.listStatus(f);
@@ -393,7 +376,7 @@ public class FilterFileSystem extends Fi
   
   @Override
   public void setWriteChecksum(boolean writeChecksum) {
-    fs.setVerifyChecksum(writeChecksum);
+    fs.setWriteChecksum(writeChecksum);
   }
 
   @Override

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java Fri Aug  3 19:00:15 2012
@@ -24,11 +24,11 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
-import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.HashMap;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -52,7 +52,8 @@ import org.apache.hadoop.util.Progressab
 public class HarFileSystem extends FilterFileSystem {
   public static final int VERSION = 3;
 
-  private static final Map<URI, HarMetaData> harMetaCache = new HashMap<URI, HarMetaData>();
+  private static final Map<URI, HarMetaData> harMetaCache =
+      new ConcurrentHashMap<URI, HarMetaData>();
 
   // uri representation of this Har filesystem
   private URI uri;
@@ -1055,7 +1056,7 @@ public class HarFileSystem extends Filte
       FileStatus archiveStat = fs.getFileStatus(archiveIndexPath);
       archiveIndexTimestamp = archiveStat.getModificationTime();
       LineReader aLin;
-      String retStr = null;
+
       // now start reading the real index file
       for (Store s: stores) {
         read = 0;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java Fri Aug  3 19:00:15 2012
@@ -139,7 +139,7 @@ public class Path implements Comparable 
    * Construct a path from a URI
    */
   public Path(URI aUri) {
-    uri = aUri;
+    uri = aUri.normalize();
   }
   
   /** Construct a Path from components. */

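Because the constructor now calls aUri.normalize(), "." and ".." segments
are collapsed when the Path is built rather than surviving into later
comparisons. A small illustration (host name made up):

    Path p = new Path(URI.create("hdfs://host/a/b/../c/./d"));
    System.out.println(p);   // hdfs://host/a/c/d after normalization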
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java Fri Aug  3 19:00:15 2012
@@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configured
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Time;
 
 /** Provides a <i>trash</i> feature.  Files are moved to a user's trash
  * directory, a subdirectory of their home directory named ".Trash".  Files are
@@ -136,7 +137,7 @@ public class TrashPolicyDefault extends 
         String orig = trashPath.toString();
         
         while(fs.exists(trashPath)) {
-          trashPath = new Path(orig + System.currentTimeMillis());
+          trashPath = new Path(orig + Time.now());
         }
         
         if (fs.rename(path, trashPath))           // move to current trash
@@ -187,7 +188,7 @@ public class TrashPolicyDefault extends 
       return;
     }
 
-    long now = System.currentTimeMillis();
+    long now = Time.now();
     for (int i = 0; i < dirs.length; i++) {
       Path path = dirs[i].getPath();
       String dir = path.toUri().getPath();
@@ -248,7 +249,7 @@ public class TrashPolicyDefault extends 
     public void run() {
       if (emptierInterval == 0)
         return;                                   // trash disabled
-      long now = System.currentTimeMillis();
+      long now = Time.now();
       long end;
       while (true) {
         end = ceiling(now, emptierInterval);
@@ -259,7 +260,7 @@ public class TrashPolicyDefault extends 
         }
 
         try {
-          now = System.currentTimeMillis();
+          now = Time.now();
           if (now >= end) {
 
             FileStatus[] homes = null;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java Fri Aug  3 19:00:15 2012
@@ -24,6 +24,8 @@ import java.io.InputStream;
 import java.util.LinkedList;
 
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
@@ -232,31 +234,65 @@ abstract class CommandWithDestination ex
     if (target.exists && (target.stat.isDirectory() || !overwrite)) {
       throw new PathExistsException(target.toString());
     }
-    target.fs.setWriteChecksum(writeChecksum);
-    PathData tempFile = null;
+    TargetFileSystem targetFs = new TargetFileSystem(target.fs);
     try {
-      tempFile = target.createTempFile(target+"._COPYING_");
-      FSDataOutputStream out = target.fs.create(tempFile.path, true);
-      IOUtils.copyBytes(in, out, getConf(), true);
+      PathData tempTarget = target.suffix("._COPYING_");
+      targetFs.setWriteChecksum(writeChecksum);
+      targetFs.writeStreamToFile(in, tempTarget);
+      targetFs.rename(tempTarget, target);
+    } finally {
+      targetFs.close(); // last-ditch effort to ensure the temp file is removed
+    }
+  }
+
+  // Helper filter filesystem that registers created files as temp files to
+  // be deleted on exit unless successfully renamed
+  private static class TargetFileSystem extends FilterFileSystem {
+    TargetFileSystem(FileSystem fs) {
+      super(fs);
+    }
+
+    void writeStreamToFile(InputStream in, PathData target) throws IOException {
+      FSDataOutputStream out = null;
+      try {
+        out = create(target);
+        IOUtils.copyBytes(in, out, getConf(), true);
+      } finally {
+        IOUtils.closeStream(out); // just in case copyBytes didn't
+      }
+    }
+    
+    // tag created files as temp files
+    FSDataOutputStream create(PathData item) throws IOException {
+      try {
+        return create(item.path, true);
+      } finally { // might have been created but stream was interrupted
+        deleteOnExit(item.path);
+      }
+    }
+
+    void rename(PathData src, PathData target) throws IOException {
       // the rename method with an option to delete the target is deprecated
-      if (target.exists && !target.fs.delete(target.path, false)) {
+      if (target.exists && !delete(target.path, false)) {
         // too bad we don't know why it failed
         PathIOException e = new PathIOException(target.toString());
         e.setOperation("delete");
         throw e;
       }
-      if (!tempFile.fs.rename(tempFile.path, target.path)) {
+      if (!rename(src.path, target.path)) {
         // too bad we don't know why it failed
-        PathIOException e = new PathIOException(tempFile.toString());
+        PathIOException e = new PathIOException(src.toString());
         e.setOperation("rename");
         e.setTargetPath(target.toString());
         throw e;
       }
-      tempFile = null;
-    } finally {
-      if (tempFile != null) {
-        tempFile.fs.delete(tempFile.path, false);
-      }
+      // cancel delete on exit if rename is successful
+      cancelDeleteOnExit(src.path);
+    }
+    @Override
+    public void close() {
+      // purge any remaining temp files, but don't close underlying fs
+      processDeleteOnExit();
     }
   }
 }
\ No newline at end of file

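The rewritten copy path pairs the new FileSystem.cancelDeleteOnExit with
deleteOnExit so the ._COPYING_ temp-file protocol is crash-safe: the temp
file is registered for cleanup the moment it is created and unregistered
only once the rename lands. A condensed sketch of the same protocol against
a plain FileSystem; the paths and the input stream `in` are assumed:

    FileSystem fs = FileSystem.get(conf);
    Path target = new Path("/data/file.txt");     // illustrative target
    Path tmp = target.suffix("._COPYING_");

    FSDataOutputStream out = fs.create(tmp, true);
    fs.deleteOnExit(tmp);           // purge the temp file if we die mid-copy
    IOUtils.copyBytes(in, out, conf, true);

    if (fs.rename(tmp, target)) {
      fs.cancelDeleteOnExit(tmp);   // copy landed; keep the renamed file
    }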
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java Fri Aug  3 19:00:15 2012
@@ -23,9 +23,11 @@ import java.util.LinkedList;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.shell.PathExceptions.PathExistsException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException;
+import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
 
 /**
  * Create the given dir
@@ -66,7 +68,11 @@ class Mkdir extends FsCommand {
 
   @Override
   protected void processNonexistentPath(PathData item) throws IOException {
-    // TODO: should use createParents to control intermediate dir creation 
+    // Check whether the parent exists. This is complicated because
+    // getParent(a/b/c/) returns a/b/c, but we want a/b.
+    if (!item.fs.exists(new Path(item.path.toString()).getParent()) && !createParents) {
+      throw new PathNotFoundException(item.toString());
+    }
     if (!item.fs.mkdirs(item.path)) {
       throw new PathIOException(item.toString());
     }

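The practical effect of the parent check is that mkdir without -p now fails
fast with PathNotFoundException when the parent is missing, instead of
implicitly creating intermediate directories through mkdirs. A rough
illustration through FsShell (paths illustrative):

    import org.apache.hadoop.fs.FsShell;
    import org.apache.hadoop.util.ToolRunner;

    FsShell shell = new FsShell(conf);

    // Parent /a/b does not exist: exits non-zero (PathNotFoundException).
    int rc1 = ToolRunner.run(shell, new String[] {"-mkdir", "/a/b/c"});

    // With -p, createParents is set and the parents are created.
    int rc2 = ToolRunner.run(shell, new String[] {"-mkdir", "-p", "/a/b/c"});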
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java Fri Aug  3 19:00:15 2012
@@ -60,7 +60,7 @@ public class PathData implements Compara
    * @throws IOException if anything goes wrong...
    */
   public PathData(String pathString, Configuration conf) throws IOException {
-    this(FileSystem.get(URI.create(pathString), conf), pathString);
+    this(FileSystem.get(stringToUri(pathString), conf), pathString);
   }
   
   /**
@@ -170,16 +170,13 @@ public class PathData implements Compara
   }
   
   /**
-   * Returns a temporary file for this PathData with the given extension.
-   * The file will be deleted on exit.
-   * @param extension for the temporary file
+   * Returns a new PathData with the given extension.
+   * @param extension the suffix to append to the path
    * @return PathData
    * @throws IOException shouldn't happen
    */
-  public PathData createTempFile(String extension) throws IOException {
-    PathData tmpFile = new PathData(fs, uri+"._COPYING_");
-    fs.deleteOnExit(tmpFile.path);
-    return tmpFile;
+  public PathData suffix(String extension) throws IOException {
+    return new PathData(fs, this+extension);
   }
 
   /**