Posted to hdfs-commits@hadoop.apache.org by su...@apache.org on 2010/06/19 01:37:16 UTC

svn commit: r956155 [1/2] - in /hadoop/hdfs/trunk: ./ src/ant/org/apache/hadoop/ant/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/protocol/ src/java/org/apache/hadoop/hdfs/security/token/block/ src/jav...

Author: suresh
Date: Fri Jun 18 23:37:13 2010
New Revision: 956155

URL: http://svn.apache.org/viewvc?rev=956155&view=rev
Log:
HDFS-752. Add interface classification to HDFS source code. Contributed by Suresh Srinivas.
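
The pattern this commit applies is sketched below. This is an illustrative
sketch only: ExampleHdfsHelper is a hypothetical class, not part of HDFS. It
assumes the annotation types from the org.apache.hadoop.classification
package in Hadoop common, where, broadly, InterfaceAudience marks who may
depend on a type and InterfaceStability marks how freely its API may change.

    import org.apache.hadoop.classification.InterfaceAudience;
    import org.apache.hadoop.classification.InterfaceStability;

    /**
     * Hypothetical class for illustration: marked as internal to Hadoop
     * and subject to incompatible change, the combination most files in
     * this commit receive.
     */
    @InterfaceAudience.Private
    @InterfaceStability.Evolving
    public class ExampleHdfsHelper {

      /**
       * Nested types carry their own classification, as with
       * DFSClient.DFSDataInputStream and DataTransferProtocol.Sender
       * in the diffs below.
       */
      @InterfaceAudience.Private
      public static class ExampleInner {
      }
    }

In the diffs that follow, some purely internal types (for example DfsTask,
DFSConfigKeys, and DFSClient) receive only the audience annotation, while
types whose signatures or wire formats may still change also receive
@InterfaceStability.Evolving.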


Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/fs/Hdfs.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockReader.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DeprecatedUTF8.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HdfsConfiguration.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/AlreadyBeingCreatedException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/Block.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientDatanodeProtocol.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DSQuotaExceededException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DataTransferProtocol.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DirectoryListing.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/FSConstants.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/HdfsFileStatus.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlock.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlocks.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/NSQuotaExceededException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/QuotaExceededException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnregisteredNodeException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockKey.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSelector.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/ExportedBlockKeys.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/InvalidBlockTokenException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/InconsistentFSStateException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/IncorrectVersionException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Storage.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/StorageInfo.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeManager.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObject.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObjectCollection.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeStatusReport.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Upgradeable.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Util.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDatasetInterface.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/Replica.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/UpgradeObjectDatanode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeActivityMBean.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupStorage.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicy.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicyDefault.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CorruptReplicasMap.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSClusterStats.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSInodeInfo.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseExpiredException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NotReplicatedYetException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SafeModeException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UpgradeObjectNamenode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMetrics.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeActivityMBean.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockCommand.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlocksWithLocations.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/CheckpointCommand.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DisallowedDatanodeException.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/InterDatanodeProtocol.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/KeyUpdateCommand.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeCommand.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocol.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeRegistration.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NodeRegistration.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ReplicaRecoveryInfo.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/UpgradeCommand.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/NameDistributionVisitor.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/ByteArray.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Fri Jun 18 23:37:13 2010
@@ -12,9 +12,6 @@ Trunk (unreleased changes)
 
   IMPROVEMENTS
 
-    HDFS-1110. Reuses objects for commonly used file names in namenode to
-    reduce the heap usage. (suresh)
-
     HDFS-1096. fix for prev. commit. (boryas)
 
     HDFS-1096. allow dfsadmin/mradmin refresh of superuser proxy group
@@ -51,9 +48,14 @@ Trunk (unreleased changes)
     HDFS-1190.  Remove unused getNamenode() method from DataNode.
     (Jeff Ames via jghoman)
 
+    HDFS-1110. Reuses objects for commonly used file names in namenode to
+    reduce the heap usage. (suresh)
+
     HDFS-1114. Implement LightWeightGSet for BlocksMap in order to reduce
     NameNode memory footprint.  (szetszwo)
 
+    HDFS-752. Add interface classification to HDFS source code. (suresh)
+
   BUG FIXES
 
     HDFS-1039. Adding test for  JspHelper.getUGI(jnp via boryas)

Modified: hadoop/hdfs/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java (original)
+++ hadoop/hdfs/trunk/src/ant/org/apache/hadoop/ant/DfsTask.java Fri Jun 18 23:37:13 2010
@@ -30,15 +30,15 @@ import org.apache.hadoop.fs.FsShell;
 import org.apache.tools.ant.AntClassLoader;
 import org.apache.tools.ant.BuildException;
 import org.apache.tools.ant.Task;
-import org.apache.tools.ant.Project;
-import org.apache.tools.ant.types.Path;
 import org.apache.hadoop.util.ToolRunner;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 
 /**
  * {@link org.apache.hadoop.fs.FsShell FsShell} wrapper for ant Task.
  */
+@InterfaceAudience.Private
 public class DfsTask extends Task {
 
   /**

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/fs/Hdfs.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/fs/Hdfs.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/fs/Hdfs.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/fs/Hdfs.java Fri Jun 18 23:37:13 2010
@@ -27,6 +27,8 @@ import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.Iterator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSClient;
@@ -37,6 +39,8 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.Progressable;
 
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class Hdfs extends AbstractFileSystem {
 
   DFSClient dfs;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java Fri Jun 18 23:37:13 2010
@@ -20,10 +20,15 @@ package org.apache.hadoop.hdfs;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /** 
   * This exception is thrown when a read encounters a block that has no locations
   * associated with it.
   */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class BlockMissingException extends IOException {
 
   private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockReader.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockReader.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockReader.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockReader.java Fri Jun 18 23:37:13 2010
@@ -30,6 +30,7 @@ import java.io.OutputStream;
 import java.net.Socket;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSInputChecker;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DataTransferProtocol;
@@ -44,6 +45,7 @@ import org.apache.hadoop.util.DataChecks
 /** This is a wrapper around a connection to a datanode
  * and understands checksum, offset etc.
  */
+@InterfaceAudience.Private
 public class BlockReader extends FSInputChecker {
 
   Socket dnSock; //for now just sending checksumOk.
@@ -456,4 +458,4 @@ public class BlockReader extends FSInput
                 ": " + e.getMessage());
     }
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java Fri Jun 18 23:37:13 2010
@@ -44,6 +44,7 @@ import javax.net.SocketFactory;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.ContentSummary;
@@ -114,6 +115,7 @@ import org.apache.hadoop.hdfs.security.t
  * filesystem tasks.
  *
  ********************************************************/
+@InterfaceAudience.Private
 public class DFSClient implements FSConstants, java.io.Closeable {
   public static final Log LOG = LogFactory.getLog(DFSClient.class);
   public static final long SERVER_DEFAULTS_VALIDITY_PERIOD = 60 * 60 * 1000L; // 1 hour
@@ -1439,6 +1441,7 @@ public class DFSClient implements FSCons
   /**
    * The Hdfs implementation of {@link FSDataInputStream}
    */
+  @InterfaceAudience.Private
   public static class DFSDataInputStream extends FSDataInputStream {
     public DFSDataInputStream(DFSInputStream in)
       throws IOException {

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Fri Jun 18 23:37:13 2010
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hdfs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 
 /** 
@@ -26,6 +27,7 @@ import org.apache.hadoop.fs.CommonConfig
  *
  */
 
+@InterfaceAudience.Private
 public class DFSConfigKeys extends CommonConfigurationKeys {
 
   public static final String  DFS_BLOCK_SIZE_KEY = "dfs.blocksize";

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java Fri Jun 18 23:37:13 2010
@@ -22,10 +22,12 @@ import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.util.StringTokenizer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
 
+@InterfaceAudience.Private
 public class DFSUtil {
   /**
    * Whether the pathname is valid.  Currently prohibits relative paths, 
@@ -55,6 +57,7 @@ public class DFSUtil {
   /**
    * Utility class to facilitate junit test error simulation.
    */
+  @InterfaceAudience.Private
   public static class ErrorSimulator {
     private static boolean[] simulation = null; // error simulation events
     public static void initializeErrorSimulationEvent(int numberOfEvents) {

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DeprecatedUTF8.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DeprecatedUTF8.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DeprecatedUTF8.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DeprecatedUTF8.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A simple wrapper around {@link org.apache.hadoop.io.UTF8}.
  * This class should be used only when it is absolutely necessary
@@ -31,6 +33,7 @@ import java.io.IOException;
  * 
  * This should be treated as a package private class to HDFS.
  */
+@InterfaceAudience.Private
 @SuppressWarnings("deprecation")
 public class DeprecatedUTF8 extends org.apache.hadoop.io.UTF8 {
   

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DistributedFileSystem.java Fri Jun 18 23:37:13 2010
@@ -25,6 +25,8 @@ import java.util.EnumSet;
 
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
@@ -52,6 +54,8 @@ import org.apache.hadoop.fs.Options;
  * DistributedFileSystem.
  *
  *****************************************************************/
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class DistributedFileSystem extends FileSystem {
   private Path workingDir;
   private URI uri;
@@ -413,6 +417,7 @@ public class DistributedFileSystem exten
   }        
   
   /** @deprecated Use {@link org.apache.hadoop.fs.FsStatus} instead */
+  @InterfaceAudience.Private
   @Deprecated
   public static class DiskStatus extends FsStatus {
     public DiskStatus(FsStatus stats) {

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
@@ -30,6 +31,7 @@ import org.apache.hadoop.security.author
 /**
  * {@link PolicyProvider} for HDFS protocols.
  */
+@InterfaceAudience.Private
 public class HDFSPolicyProvider extends PolicyProvider {
   private static final Service[] hdfsServices =
     new Service[] {

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HdfsConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HdfsConfiguration.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HdfsConfiguration.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HdfsConfiguration.java Fri Jun 18 23:37:13 2010
@@ -20,9 +20,12 @@ package org.apache.hadoop.hdfs;
 
 import org.apache.hadoop.conf.Configuration;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Adds deprecated keys into the configuration.
  */
+@InterfaceAudience.Private
 public class HdfsConfiguration extends Configuration {
   static {
     addDeprecatedKeys();

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java Fri Jun 18 23:37:13 2010
@@ -33,6 +33,8 @@ import java.util.EnumSet;
 import java.util.Random;
 import java.util.TimeZone;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.CreateFlag;
@@ -63,6 +65,8 @@ import org.xml.sax.helpers.XMLReaderFact
  * @see org.apache.hadoop.hdfs.server.namenode.ListPathsServlet
  * @see org.apache.hadoop.hdfs.server.namenode.FileDataServlet
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class HftpFileSystem extends FileSystem {
   static {
     HttpURLConnection.setFollowRedirects(true);

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HsftpFileSystem.java Fri Jun 18 23:37:13 2010
@@ -37,6 +37,8 @@ import javax.net.ssl.TrustManager;
 import javax.net.ssl.TrustManagerFactory;
 import javax.net.ssl.X509TrustManager;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -47,6 +49,8 @@ import org.apache.hadoop.conf.Configurat
  * @see org.apache.hadoop.hdfs.server.namenode.ListPathsServlet
  * @see org.apache.hadoop.hdfs.server.namenode.FileDataServlet
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class HsftpFileSystem extends HftpFileSystem {
 
   private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/AlreadyBeingCreatedException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/AlreadyBeingCreatedException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/AlreadyBeingCreatedException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/AlreadyBeingCreatedException.java Fri Jun 18 23:37:13 2010
@@ -20,10 +20,15 @@ package org.apache.hadoop.hdfs.protocol;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * The exception that happens when you ask to create a file that already
  * is being created, but is not closed yet.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class AlreadyBeingCreatedException extends IOException {
   static final long serialVersionUID = 0x12308AD009L;
   public AlreadyBeingCreatedException(String msg) {

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/Block.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/Block.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/Block.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/Block.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,8 @@ import java.io.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.io.*;
 
@@ -29,6 +31,8 @@ import org.apache.hadoop.io.*;
  * long.
  *
  **************************************************/
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class Block implements Writable, Comparable<Block> {
   public static final String BLOCK_FILE_PREFIX = "blk_";
   public static final String METADATA_EXTENSION = ".meta";

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.hdfs.protocol;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.datanode.ReplicaInfo;
 
@@ -40,6 +42,8 @@ import org.apache.hadoop.hdfs.server.dat
  *   represented by 4 longs: three for the block id, length, generation 
  *   stamp, and the fourth for the replica state.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class BlockListAsLongs implements Iterable<Block> {
   /**
    * A finalized block as 3 longs
@@ -131,6 +135,8 @@ public class BlockListAsLongs implements
    * Iterates over blocks in the block report.
    * Avoids object allocation on each iteration.
    */
+  @InterfaceAudience.Private
+  @InterfaceStability.Evolving
   public class BlockReportIterator implements Iterator<Block> {
     private int currentBlockIndex;
     private Block block;
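
As the javadoc above notes, a block report travels as a flat array of longs
rather than as Block objects. A minimal sketch of that packing, using
hypothetical names (the real BlockListAsLongs layout also carries header
fields this omits):

    import java.util.Arrays;

    /** Illustration only: one replica packed as 4 consecutive longs. */
    public class BlockAsLongsSketch {
      public static long[] encode(long blockId, long numBytes,
                                  long genStamp, long replicaState) {
        return new long[] { blockId, numBytes, genStamp, replicaState };
      }

      public static void main(String[] args) {
        // e.g. block id 42, 128MB of data, generation stamp 1001, state 0
        System.out.println(
            Arrays.toString(encode(42L, 134217728L, 1001L, 0L)));
      }
    }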

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientDatanodeProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientDatanodeProtocol.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientDatanodeProtocol.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientDatanodeProtocol.java Fri Jun 18 23:37:13 2010
@@ -21,12 +21,16 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenSelector;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.security.token.TokenInfo;
 
 /** A client-datanode protocol for block recovery
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 @TokenInfo(BlockTokenSelector.class)
 public interface ClientDatanodeProtocol extends VersionedProtocol {
   public static final Log LOG = LogFactory.getLog(ClientDatanodeProtocol.class);

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,8 @@ import java.io.IOException;
 
 import org.apache.avro.reflect.Nullable;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FileStatus;
@@ -55,6 +57,8 @@ import org.apache.hadoop.hdfs.security.t
  * as well as open/close file streams, etc.
  *
  **********************************************************************/
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 @KerberosInfo(
     serverPrincipal = DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY)
 @TokenInfo(DelegationTokenSelector.class)

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DSQuotaExceededException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DSQuotaExceededException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DSQuotaExceededException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DSQuotaExceededException.java Fri Jun 18 23:37:13 2010
@@ -18,8 +18,12 @@
 
 package org.apache.hadoop.hdfs.protocol;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.StringUtils;
 
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class DSQuotaExceededException extends QuotaExceededException {
   protected static final long serialVersionUID = 1L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DataTransferProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DataTransferProtocol.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DataTransferProtocol.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DataTransferProtocol.java Fri Jun 18 23:37:13 2010
@@ -24,6 +24,8 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -33,6 +35,8 @@ import org.apache.hadoop.security.token.
 /**
  * Transfer data to/from datanode using a streaming protocol.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public interface DataTransferProtocol {
   
   
@@ -214,6 +218,8 @@ public interface DataTransferProtocol {
 
 
   /** Sender */
+  @InterfaceAudience.Private
+  @InterfaceStability.Evolving
   public static class Sender {
     /** Initialize an operation. */
     public static void op(DataOutputStream out, Op op) throws IOException {
@@ -461,6 +467,8 @@ public interface DataTransferProtocol {
   }
   
   /** reply **/
+  @InterfaceAudience.Private
+  @InterfaceStability.Evolving
   public static class PipelineAck implements Writable {
     private long seqno;
     private Status replies[];
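
The context above ends inside PipelineAck, one of several Writable types
classified in this commit. A minimal sketch of that serialization pattern,
with hypothetical names and a simplified field layout (the real PipelineAck
encodes a sequence number plus per-datanode reply statuses):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.Writable;

    /** Illustration only: a seqno plus a variable-length reply array. */
    public class ExampleAck implements Writable {
      private long seqno;
      private short[] replies = new short[0];

      @Override
      public void write(DataOutput out) throws IOException {
        out.writeLong(seqno);
        out.writeShort(replies.length);   // count first, then each reply
        for (short reply : replies) {
          out.writeShort(reply);
        }
      }

      @Override
      public void readFields(DataInput in) throws IOException {
        seqno = in.readLong();
        replies = new short[in.readShort()];
        for (int i = 0; i < replies.length; i++) {
          replies[i] = in.readShort();
        }
      }
    }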

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.DeprecatedUTF8;
 import org.apache.hadoop.io.WritableComparable;
 
@@ -31,6 +33,8 @@ import org.apache.hadoop.io.WritableComp
  * which it currently represents.
  * 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class DatanodeID implements WritableComparable<DatanodeID> {
   public static final DatanodeID[] EMPTY_ARRAY = {}; 
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,8 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Date;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
@@ -40,6 +42,8 @@ import org.apache.avro.reflect.Nullable;
  * This object is used for communication in the
  * Datanode Protocol and the Client Protocol.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class DatanodeInfo extends DatanodeID implements Node {
   protected long capacity;
   protected long dfsUsed;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DirectoryListing.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DirectoryListing.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DirectoryListing.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/DirectoryListing.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
@@ -28,6 +30,8 @@ import org.apache.hadoop.io.WritableFact
  * This class defines a partial listing of a directory to support
  * iterative directory listing.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class DirectoryListing implements Writable {
   static {                                      // register a ctor
     WritableFactories.setFactory

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/FSConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/FSConstants.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/FSConstants.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/FSConstants.java Fri Jun 18 23:37:13 2010
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 
 /************************************
  * Some handy constants
  *
  ************************************/
+@InterfaceAudience.Private
 public interface FSConstants {
   public static int MIN_BLOCKS_FOR_WRITE = 5;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/HdfsFileStatus.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/HdfsFileStatus.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/HdfsFileStatus.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/HdfsFileStatus.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -33,6 +35,8 @@ import org.apache.avro.reflect.Nullable;
 
 /** Interface that represents the over-the-wire information for a file.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class HdfsFileStatus implements Writable {
   static {                                      // register a ctor
     WritableFactories.setFactory

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlock.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlock.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlock.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlock.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.security.token.Token;
@@ -28,6 +30,8 @@ import java.io.*;
  * objects.  It tells where to find a Block.
  * 
  ****************************************************/
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class LocatedBlock implements Writable {
 
   static {                                      // register a ctor

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlocks.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlocks.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlocks.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/LocatedBlocks.java Fri Jun 18 23:37:13 2010
@@ -25,6 +25,8 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
@@ -34,6 +36,8 @@ import org.apache.avro.reflect.Nullable;
 /**
  * Collection of blocks with their locations and the file length.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class LocatedBlocks implements Writable {
   private long fileLength;
   private List<LocatedBlock> blocks; // array of blocks with prioritized locations

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/NSQuotaExceededException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/NSQuotaExceededException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/NSQuotaExceededException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/NSQuotaExceededException.java Fri Jun 18 23:37:13 2010
@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.hdfs.protocol;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public final class NSQuotaExceededException extends QuotaExceededException {
   protected static final long serialVersionUID = 1L;
   

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/QuotaExceededException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/QuotaExceededException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/QuotaExceededException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/QuotaExceededException.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,9 @@ package org.apache.hadoop.hdfs.protocol;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /** 
  * This exception is thrown when modification to HDFS results in violation
  * of a directory quota. A directory quota might be namespace quota (limit 
@@ -32,6 +35,8 @@ import java.io.IOException;
  *  DSQuotaExceededException or
  *  NSQuotaExceededException
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class QuotaExceededException extends IOException {
   protected static final long serialVersionUID = 1L;
   protected String pathName=null;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java Fri Jun 18 23:37:13 2010
@@ -19,9 +19,14 @@ package org.apache.hadoop.hdfs.protocol;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Exception indicating that a replica is already being recovered.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class RecoveryInProgressException extends IOException {
   private static final long serialVersionUID = 1L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnregisteredNodeException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnregisteredNodeException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnregisteredNodeException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnregisteredNodeException.java Fri Jun 18 23:37:13 2010
@@ -20,12 +20,16 @@ package org.apache.hadoop.hdfs.protocol;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.server.protocol.NodeRegistration;
 
 /**
  * This exception is thrown when a node that has not previously 
  * registered is trying to access the name node.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class UnregisteredNodeException extends IOException {
   private static final long serialVersionUID = -5620209396945970810L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java Fri Jun 18 23:37:13 2010
@@ -19,12 +19,17 @@
 package org.apache.hadoop.hdfs.protocol;
 
 import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.Path;
 
 /** 
  * Thrown when a symbolic link is encountered in a path.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public final class UnresolvedPathException extends UnresolvedLinkException {
   private static final long serialVersionUID = 1L;
   private String originalPath;  // The original path containing the link

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockKey.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockKey.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockKey.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockKey.java Fri Jun 18 23:37:13 2010
@@ -20,11 +20,13 @@ package org.apache.hadoop.hdfs.security.
 
 import javax.crypto.SecretKey;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.security.token.delegation.DelegationKey;
 
 /**
  * Key used for generating and verifying block tokens
  */
+@InterfaceAudience.Private
 public class BlockKey extends DelegationKey {
 
   public BlockKey() {
@@ -34,4 +36,4 @@ public class BlockKey extends Delegation
   public BlockKey(int keyId, long expiryDate, SecretKey key) {
     super(keyId, expiryDate, key);
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java Fri Jun 18 23:37:13 2010
@@ -23,12 +23,14 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.EnumSet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager.AccessMode;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
+@InterfaceAudience.Private
 public class BlockTokenIdentifier extends TokenIdentifier {
   static final Text KIND_NAME = new Text("HDFS_BLOCK_TOKEN");
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java Fri Jun 18 23:37:13 2010
@@ -30,6 +30,7 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -43,6 +44,7 @@ import org.apache.hadoop.security.token.
  * master and slave can generate and verify block tokens. Typically, master mode
  * is used by NN and slave mode is used by DN.
  */
+@InterfaceAudience.Private
 public class BlockTokenSecretManager extends
     SecretManager<BlockTokenIdentifier> {
   public static final Log LOG = LogFactory

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSelector.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSelector.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSelector.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSelector.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.security.
 
 import java.util.Collection;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -27,6 +28,7 @@ import org.apache.hadoop.security.token.
 /**
  * A block token selector for HDFS
  */
+@InterfaceAudience.Private
 public class BlockTokenSelector implements TokenSelector<BlockTokenIdentifier> {
 
   @SuppressWarnings("unchecked")

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/ExportedBlockKeys.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/ExportedBlockKeys.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/ExportedBlockKeys.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/ExportedBlockKeys.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
@@ -29,6 +30,7 @@ import org.apache.hadoop.io.WritableFact
 /**
  * Object for passing block keys
  */
+@InterfaceAudience.Private
 public class ExportedBlockKeys implements Writable {
   public static final ExportedBlockKeys DUMMY_KEYS = new ExportedBlockKeys();
   private boolean isBlockTokenEnabled;
@@ -109,4 +111,4 @@ public class ExportedBlockKeys implement
     }
   }
 
-}
\ No newline at end of file
+}

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/InvalidBlockTokenException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/InvalidBlockTokenException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/InvalidBlockTokenException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/token/block/InvalidBlockTokenException.java Fri Jun 18 23:37:13 2010
@@ -20,9 +20,14 @@ package org.apache.hadoop.hdfs.security.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Access token verification failed.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class InvalidBlockTokenException extends IOException {
   private static final long serialVersionUID = 168L;
 

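The exception types touched by this commit consistently receive the
Private/Evolving pair, while most non-exception classes get only the audience
annotation. Schematically (the class name below is hypothetical, not part of
the commit):

    import java.io.IOException;

    import org.apache.hadoop.classification.InterfaceAudience;
    import org.apache.hadoop.classification.InterfaceStability;

    // Private: not part of the public API; for use within Hadoop itself only.
    // Evolving: may still change incompatibly between minor releases.
    @InterfaceAudience.Private
    @InterfaceStability.Evolving
    public class ExampleInternalException extends IOException {
      private static final long serialVersionUID = 1L;

      public ExampleInternalException(String msg) {
        super(msg);
      }
    }
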
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java Fri Jun 18 23:37:13 2010
@@ -50,6 +50,7 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -184,6 +185,7 @@ import org.apache.hadoop.util.ToolRunner
  * balancer is running.
  */
 
+@InterfaceAudience.Private
 public class Balancer implements Tool {
   private static final Log LOG = 
     LogFactory.getLog(Balancer.class.getName());

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java Fri Jun 18 23:37:13 2010
@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.hdfs.server.common;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /****************************************************************
  * A GenerationStamp is a Hadoop FS primitive, identified by a long.
  ****************************************************************/
+@InterfaceAudience.Private
 public class GenerationStamp implements Comparable<GenerationStamp> {
   /**
    * The first valid generation stamp.

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java Fri Jun 18 23:37:13 2010
@@ -21,11 +21,14 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /************************************
  * Some handy internal HDFS constants
  *
  ************************************/
 
+@InterfaceAudience.Private
 public interface HdfsConstants {
   /**
    * Type of the node

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/InconsistentFSStateException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/InconsistentFSStateException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/InconsistentFSStateException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/InconsistentFSStateException.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,9 @@ package org.apache.hadoop.hdfs.server.co
 
 import java.io.File;
 import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -26,6 +29,8 @@ import org.apache.hadoop.util.StringUtil
  * and is not recoverable. 
  * 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class InconsistentFSStateException extends IOException {
   private static final long serialVersionUID = 1L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/IncorrectVersionException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/IncorrectVersionException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/IncorrectVersionException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/IncorrectVersionException.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.hdfs.server.co
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 
 /**
@@ -26,6 +28,8 @@ import org.apache.hadoop.hdfs.protocol.F
  * current version of the application.
  * 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class IncorrectVersionException extends IOException {
   private static final long serialVersionUID = 1L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Storage.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Storage.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Storage.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Storage.java Fri Jun 18 23:37:13 2010
@@ -31,6 +31,7 @@ import java.util.Properties;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.NodeType;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
@@ -57,6 +58,7 @@ import org.apache.hadoop.util.VersionInf
  * The locks are released when the servers stop (normally or abnormally).
  * 
  */
+@InterfaceAudience.Private
 public abstract class Storage extends StorageInfo {
   public static final Log LOG = LogFactory.getLog(Storage.class.getName());
 
@@ -104,6 +106,7 @@ public abstract class Storage extends St
    * Implementations can define a type for storage directory by implementing
    * this interface.
    */
+  @InterfaceAudience.Private
   public interface StorageDirType {
     public StorageDirType getStorageDirType();
     public boolean isOfType(StorageDirType type);
@@ -190,6 +193,7 @@ public abstract class Storage extends St
   /**
    * One of the storage directories.
    */
+  @InterfaceAudience.Private
   public class StorageDirectory {
     File              root; // root directory
     FileLock          lock; // storage lock

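Note that Storage's public nested types (StorageDirType and StorageDirectory)
are annotated individually, which suggests the classification is applied per
type rather than inherited from the enclosing class. A schematic sketch with
hypothetical names:

    import org.apache.hadoop.classification.InterfaceAudience;

    @InterfaceAudience.Private
    public abstract class OuterStorage {

      // Nested public types carry their own annotation; tooling that filters
      // javadoc or audits API compatibility inspects each type on its own.
      @InterfaceAudience.Private
      public interface NestedDirType {
        NestedDirType getType();
      }

      @InterfaceAudience.Private
      public class NestedDirectory {
        private final String root;

        public NestedDirectory(String root) {
          this.root = root;
        }

        public String getRoot() {
          return root;
        }
      }
    }
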
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/StorageInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/StorageInfo.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/StorageInfo.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/StorageInfo.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 
 
@@ -29,6 +30,7 @@ import org.apache.hadoop.io.Writable;
  * 
  * TODO namespaceID should be long and computed as hash(address + port)
  */
+@InterfaceAudience.Private
 public class StorageInfo implements Writable {
   public int   layoutVersion;   // layout version of the storage data
   public int   namespaceID;     // id of the file system

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeManager.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeManager.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeManager.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeManager.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.co
 import java.io.IOException;
 import java.util.SortedSet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
 
@@ -29,6 +30,7 @@ import org.apache.hadoop.hdfs.server.pro
  * {@link #broadcastCommand} is the command that should be 
  *
  */
+@InterfaceAudience.Private
 public abstract class UpgradeManager {
   protected SortedSet<Upgradeable> currentUpgrades = null;
   protected boolean upgradeState = false; // true if upgrade is in progress

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObject.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObject.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObject.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObject.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.co
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.common.UpgradeObjectCollection.UOSignature;
 
 /**
@@ -27,6 +28,7 @@ import org.apache.hadoop.hdfs.server.com
  * Contains a default implementation of the common methods of the
  * {@link Upgradeable} interface.
  */
+@InterfaceAudience.Private
 public abstract class UpgradeObject implements Upgradeable {
   protected short status;
   

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObjectCollection.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObjectCollection.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObjectCollection.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeObjectCollection.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.util.StringUtils;
 
@@ -29,6 +30,7 @@ import org.apache.hadoop.util.StringUtil
  *
  * Upgrade objects should be registered here before they can be used. 
  */
+@InterfaceAudience.Private
 public class UpgradeObjectCollection {
   static {
     initialize();

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeStatusReport.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeStatusReport.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeStatusReport.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/UpgradeStatusReport.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
@@ -31,6 +32,7 @@ import org.apache.hadoop.io.WritableFact
  * 
  * Describes status of current upgrade.
  */
+@InterfaceAudience.Private
 public class UpgradeStatusReport implements Writable {
   protected int version;
   protected short upgradeStatus;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Upgradeable.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Upgradeable.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Upgradeable.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Upgradeable.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.co
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
 
 /**
@@ -29,6 +30,7 @@ import org.apache.hadoop.hdfs.server.pro
  * That is all components whose layout version is greater or equal to the
  * one returned by {@link #getVersion()} must be upgraded with this object.
  */
+@InterfaceAudience.Private
 public interface Upgradeable extends Comparable<Upgradeable> {
   /**
    * Get the layout version of the upgrade object.

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Util.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Util.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Util.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/Util.java Fri Jun 18 23:37:13 2010
@@ -26,7 +26,9 @@ import java.util.Collection;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 
+@InterfaceAudience.Private
 public final class Util {
   private final static Log LOG = LogFactory.getLog(Util.class.getName());
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java Fri Jun 18 23:37:13 2010
@@ -45,6 +45,7 @@ import javax.servlet.http.HttpServletRes
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -954,6 +955,7 @@ class DataBlockScanner implements Runnab
     }    
   }
   
+  @InterfaceAudience.Private
   public static class Servlet extends HttpServlet {
     private static final long serialVersionUID = 1L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Fri Jun 18 23:37:13 2010
@@ -48,6 +48,7 @@ import java.util.concurrent.atomic.Atomi
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -147,6 +148,7 @@ import org.apache.hadoop.util.DiskChecke
  * information to clients or other DataNodes that might be interested.
  *
  **********************************************************/
+@InterfaceAudience.Private
 public class DataNode extends Configured 
     implements InterDatanodeProtocol, ClientDatanodeProtocol, FSConstants, Runnable {
   public static final Log LOG = LogFactory.getLog(DataNode.class);

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java Fri Jun 18 23:37:13 2010
@@ -31,6 +31,7 @@ import java.util.Properties;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FileUtil.HardLink;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.server.common.GenerationStamp;
@@ -48,6 +49,7 @@ import org.apache.hadoop.util.Daemon;
  * <p>
  * @see Storage
  */
+@InterfaceAudience.Private
 public class DataStorage extends Storage {
   // Constants
   final static String BLOCK_SUBDIR_PREFIX = "subdir";

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java Fri Jun 18 23:37:13 2010
@@ -25,13 +25,13 @@ import java.util.LinkedList;
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -43,6 +43,7 @@ import org.apache.hadoop.hdfs.server.dat
  * Reconciles the differences with block information maintained in
  * {@link FSDataset}
  */
+@InterfaceAudience.Private
 public class DirectoryScanner {
   private static final Log LOG = LogFactory.getLog(DirectoryScanner.class);
   private static final int DEFAULT_SCAN_INTERVAL = 21600;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java Fri Jun 18 23:37:13 2010
@@ -38,6 +38,7 @@ import javax.management.NotCompliantMBea
 import javax.management.ObjectName;
 import javax.management.StandardMBean;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.DF;
 import org.apache.hadoop.fs.DU;
@@ -68,6 +69,7 @@ import org.apache.hadoop.io.IOUtils;
  * has a unique name and an extent on disk.
  *
  ***************************************************/
+@InterfaceAudience.Private
 public class FSDataset implements FSConstants, FSDatasetInterface {
 
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDatasetInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDatasetInterface.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDatasetInterface.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDatasetInterface.java Fri Jun 18 23:37:13 2010
@@ -24,9 +24,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 
-
-
-
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean;
 import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
@@ -42,6 +40,7 @@ import org.apache.hadoop.util.DiskChecke
  * SimulatedFSDataset (which simulates data).
  *
  */
+@InterfaceAudience.Private
 public interface FSDatasetInterface extends FSDatasetMBean {
   
   

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/Replica.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/Replica.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/Replica.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/Replica.java Fri Jun 18 23:37:13 2010
@@ -17,11 +17,13 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.ReplicaState;
 
 /** 
  * This represents block replicas which are stored in a DataNode.
  */
+@InterfaceAudience.Private
 public interface Replica {
   /** get block ID  */
   public long getBlockId();

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,7 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FileUtil.HardLink;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -32,6 +33,7 @@ import org.apache.hadoop.io.IOUtils;
  * This class is used by datanodes to maintain metadata of their replicas.
  * It provides a general interface for meta information of a replica.
  */
+@InterfaceAudience.Private
 abstract public class ReplicaInfo extends Block implements Replica {
   private FSVolume volume;      // volume where the replica belongs
   private File     dir;         // directory where block & meta files belong

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/UpgradeObjectDatanode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/UpgradeObjectDatanode.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/UpgradeObjectDatanode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/UpgradeObjectDatanode.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.UpgradeObject;
@@ -31,6 +32,7 @@ import java.net.SocketTimeoutException;
  * Base class for data-node upgrade objects.
  * Data-node upgrades are run in separate threads.
  */
+@InterfaceAudience.Private
 public abstract class UpgradeObjectDatanode extends UpgradeObject implements Runnable {
   private DataNode dataNode = null;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeActivityMBean.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeActivityMBean.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeActivityMBean.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeActivityMBean.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.da
 import java.util.Random;
 
 import javax.management.ObjectName;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.util.MBeanUtil;
 import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
 import org.apache.hadoop.metrics.util.MetricsRegistry;
@@ -53,6 +54,7 @@ import org.apache.hadoop.metrics.util.Me
  * from the metrics registry passed as an argument to the constructor
  */
 
+@InterfaceAudience.Private
 public class DataNodeActivityMBean extends MetricsDynamicMBeanBase {
   final private ObjectName mbeanName;
   private Random rand = new Random(); 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode.metrics;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
@@ -43,6 +44,7 @@ import org.apache.hadoop.hdfs.DFSConfigK
  *  <p> {@link #blocksRead}.inc()
  *
  */
+@InterfaceAudience.Private
 public class DataNodeMetrics implements Updater {
   private final MetricsRecord metricsRecord;
   private DataNodeActivityMBean datanodeActivityMBean;

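The {@link #blocksRead}.inc() example in the javadoc above is plain field
access; a minimal usage sketch (not part of the commit), assuming the
(Configuration, String) constructor and public counter fields at this
revision:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;

    public class MetricsUsageSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        DataNodeMetrics metrics = new DataNodeMetrics(conf, "example-datanode");
        // Counters are public fields; callers increment them at the call site,
        // and the metrics framework polls doUpdates() to publish the values.
        metrics.blocksRead.inc();
        metrics.shutdown();
      }
    }
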
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/metrics/FSDatasetMBean.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.hdfs.server.da
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * 
  * This interface defines the methods to get the status of the FSDataset of
@@ -34,6 +36,7 @@ import java.io.IOException;
  * @see org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeStatisticsMBean
  *
  */
+@InterfaceAudience.Private
 public interface FSDatasetMBean {
   
   /**

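FSDataset publishes this MBean by wrapping itself in a StandardMBean and
registering it under the DataNode service. A sketch modeled on that pattern
(the helper class name is hypothetical), assuming
MBeanUtil.registerMBean(String, String, Object):

    import javax.management.NotCompliantMBeanException;
    import javax.management.ObjectName;
    import javax.management.StandardMBean;

    import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean;
    import org.apache.hadoop.metrics.util.MBeanUtil;

    public class DatasetMBeanSketch {
      // Wrap the dataset in a StandardMBean keyed to the MBean interface,
      // then register it so JMX clients can read the dataset's status.
      static ObjectName register(FSDatasetMBean dataset, String storageId) {
        try {
          StandardMBean bean = new StandardMBean(dataset, FSDatasetMBean.class);
          return MBeanUtil.registerMBean("DataNode",
              "FSDatasetState-" + storageId, bean);
        } catch (NotCompliantMBeanException e) {
          throw new RuntimeException(e);
        }
      }
    }
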
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
@@ -51,6 +52,7 @@ import org.apache.hadoop.net.NetUtils;
  * namespace image to local disk(s).</li>
  * </ol>
  */
+@InterfaceAudience.Private
 public class BackupNode extends NameNode {
   private static final String BN_ADDRESS_NAME_KEY = DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY;
   private static final String BN_ADDRESS_DEFAULT = DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_DEFAULT;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupStorage.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupStorage.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupStorage.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BackupStorage.java Fri Jun 18 23:37:13 2010
@@ -24,6 +24,7 @@ import java.net.URI;
 import java.util.Collection;
 import java.util.Iterator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException;
 import static org.apache.hadoop.hdfs.server.common.Util.now;
@@ -33,6 +34,7 @@ import org.apache.hadoop.hdfs.server.pro
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.io.LongWritable;
 
+@InterfaceAudience.Private
 public class BackupStorage extends FSImage {
   // Names of the journal spool directory and the spool file
   private static final String STORAGE_JSPOOL_DIR = "jspool";