You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by su...@apache.org on 2010/06/19 01:37:16 UTC

svn commit: r956155 [2/2] - in /hadoop/hdfs/trunk: ./ src/ant/org/apache/hadoop/ant/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/protocol/ src/java/org/apache/hadoop/hdfs/security/token/block/ src/jav...

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java Fri Jun 18 23:37:13 2010
@@ -32,6 +32,7 @@ import java.util.Random;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
@@ -48,6 +49,7 @@ import org.apache.hadoop.hdfs.DFSConfigK
  * This class is a helper class for {@link FSNamesystem} and requires several
  * methods to be called with lock held on {@link FSNamesystem}.
  */
+@InterfaceAudience.Private
 public class BlockManager {
   // Default initial capacity and load factor of map
   public static final int DEFAULT_INITIAL_MAP_CAPACITY = 16;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicy.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicy.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicy.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicy.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
@@ -29,8 +30,10 @@ import java.util.*;
  * This interface is used for choosing the desired number of targets
  * for placing block replicas.
  */
+@InterfaceAudience.Private
 public abstract class BlockPlacementPolicy {
     
+  @InterfaceAudience.Private
   public static class NotEnoughReplicasException extends Exception {
     private static final long serialVersionUID = 1L;
     NotEnoughReplicasException(String msg) {

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicyDefault.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicyDefault.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicyDefault.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/BlockPlacementPolicyDefault.java Fri Jun 18 23:37:13 2010
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import org.apache.commons.logging.*;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -37,6 +38,7 @@ import java.util.*;
  * that is on a different rack. The 3rd replica is placed on a datanode
  * which is on a different node of the rack as the second replica.
  */
+@InterfaceAudience.Private
 public class BlockPlacementPolicyDefault extends BlockPlacementPolicy {
   private boolean considerLoad; 
   private NetworkTopology clusterMap;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.namenode.FSImage;
 import org.apache.hadoop.io.WritableComparable;
@@ -28,6 +29,7 @@ import org.apache.hadoop.io.WritableComp
 /**
  * A unique signature intended to identify checkpoint transactions.
  */
+@InterfaceAudience.Private
 public class CheckpointSignature extends StorageInfo 
                       implements WritableComparable<CheckpointSignature> {
   private static final String FIELD_SEPARATOR = ":";

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java Fri Jun 18 23:37:13 2010
@@ -25,15 +25,16 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.znerd.xmlenc.XMLOutputter;
 
 /** Servlet for rendering the content summary of a path */
+@InterfaceAudience.Private
 public class ContentSummaryServlet extends DfsServlet {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CorruptReplicasMap.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CorruptReplicasMap.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CorruptReplicasMap.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/CorruptReplicasMap.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.ipc.Server;
 
@@ -31,6 +32,7 @@ import java.util.*;
  * Mapping: Block -> TreeSet<DatanodeDescriptor> 
  */
 
+@InterfaceAudience.Private
 public class CorruptReplicasMap{
 
   private SortedMap<Block, Collection<DatanodeDescriptor>> corruptReplicasMap =

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,8 @@ import java.io.DataInput;
 import java.io.IOException;
 import java.util.*;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -46,6 +48,7 @@ import org.apache.hadoop.io.WritableUtil
  * fsImage.
 
  **************************************************/
+@InterfaceAudience.Private
 public class DatanodeDescriptor extends DatanodeInfo {
   
   // Stores status of decommissioning.
@@ -53,6 +56,8 @@ public class DatanodeDescriptor extends 
   DecommissioningStatus decommissioningStatus = new DecommissioningStatus();
   
   /** Block and targets pair */
+  @InterfaceAudience.Private
+  @InterfaceStability.Evolving
   public static class BlockTargetPair {
     public final Block block;
     public final DatanodeDescriptor[] targets;    

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java Fri Jun 18 23:37:13 2010
@@ -27,6 +27,7 @@ import javax.servlet.http.HttpServletRes
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.io.Text;
@@ -37,6 +38,7 @@ import org.apache.hadoop.security.token.
 /**
  * Serve delegation tokens over http for use in hftp.
  */
+@InterfaceAudience.Private
 @SuppressWarnings("serial")
 public class DelegationTokenServlet extends DfsServlet {
   private static final Log LOG = LogFactory.getLog(DelegationTokenServlet.class);

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSClusterStats.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSClusterStats.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSClusterStats.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSClusterStats.java Fri Jun 18 23:37:13 2010
@@ -17,10 +17,13 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /** 
  * This interface is used for retrieving the load related statistics of 
  * the cluster.
  */
+@InterfaceAudience.Private
 public interface FSClusterStats {
 
   /**

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java Fri Jun 18 23:37:13 2010
@@ -26,6 +26,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -59,6 +61,8 @@ import org.apache.hadoop.security.token.
  * FSEditLog maintains a log of the namespace modifications.
  * 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class FSEditLog {
   public  static final byte OP_INVALID = -1;
   private static final byte OP_ADD = 0;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java Fri Jun 18 23:37:13 2010
@@ -43,6 +43,8 @@ import java.util.Properties;
 import java.util.Random;
 import java.util.Set;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -76,6 +78,8 @@ import org.apache.hadoop.hdfs.DFSConfigK
  * FSImage handles checkpointing and logging of the namespace edits.
  * 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class FSImage extends Storage {
 
   private static final SimpleDateFormat DATE_FORM =

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSInodeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSInodeInfo.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSInodeInfo.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSInodeInfo.java Fri Jun 18 23:37:13 2010
@@ -17,10 +17,13 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /** 
 * This interface is used by the pluggable block placement policy
  * to expose a few characteristics of an Inode.
  */
+@InterfaceAudience.Private
 public interface FSInodeInfo {
 
   /**

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.na
 import org.apache.commons.logging.*;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.*;
@@ -109,6 +110,7 @@ import javax.management.StandardMBean;
  * 4)  machine --> blocklist (inverted #2)
  * 5)  LRU cache of updated-heartbeat machines
  ***************************************************/
+@InterfaceAudience.Private
 public class FSNamesystem implements FSConstants, FSNamesystemMBean, FSClusterStats {
   public static final Log LOG = LogFactory.getLog(FSNamesystem.class);
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java Fri Jun 18 23:37:13 2010
@@ -29,13 +29,13 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -45,8 +45,10 @@ import org.apache.hadoop.security.UserGr
 import org.znerd.xmlenc.XMLOutputter;
 
 /** Servlets for file checksum */
+@InterfaceAudience.Private
 public class FileChecksumServlets {
   /** Redirect file checksum queries to an appropriate datanode. */
+  @InterfaceAudience.Private
   public static class RedirectServlet extends DfsServlet {
     /** For java.io.Serializable */
     private static final long serialVersionUID = 1L;
@@ -74,6 +76,7 @@ public class FileChecksumServlets {
   }
   
   /** Get FileChecksum */
+  @InterfaceAudience.Private
   public static class GetServlet extends DfsServlet {
     /** For java.io.Serializable */
     private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Fri Jun 18 23:37:13 2010
@@ -25,8 +25,8 @@ import java.security.PrivilegedException
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -39,6 +39,7 @@ import org.apache.hadoop.security.UserGr
 /** Redirect queries about the hosted filesystem to an appropriate datanode.
  * @see org.apache.hadoop.hdfs.HftpFileSystem
  */
+@InterfaceAudience.Private
 public class FileDataServlet extends DfsServlet {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java Fri Jun 18 23:37:13 2010
@@ -26,15 +26,15 @@ import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
  * This class is used in Namesystem's web server to do fsck on namenode.
  */
+@InterfaceAudience.Private
 public class FsckServlet extends DfsServlet {
   /** for java.io.Serializable */
   private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java Fri Jun 18 23:37:13 2010
@@ -25,6 +25,7 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -32,6 +33,7 @@ import org.apache.hadoop.util.StringUtil
  * Typically used by the Secondary NameNode to retrieve image and
  * edit file for periodic checkpointing.
  */
+@InterfaceAudience.Private
 public class GetImageServlet extends HttpServlet {
   private static final long serialVersionUID = -7669068179452648952L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.na
 
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -26,6 +27,7 @@ import org.apache.hadoop.hdfs.protocol.B
 /**
  * An INode representing a symbolic link.
  */
+@InterfaceAudience.Private
 public class INodeSymlink extends INode {
   private byte[] symlink; // The target URI
 
@@ -75,4 +77,4 @@ public class INodeSymlink extends INode 
   public boolean isDirectory() {
     return false;
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseExpiredException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseExpiredException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseExpiredException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseExpiredException.java Fri Jun 18 23:37:13 2010
@@ -20,9 +20,14 @@ package org.apache.hadoop.hdfs.server.na
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * The lease that was being used to create this file has expired.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class LeaseExpiredException extends IOException {
   private static final long serialVersionUID = 1L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java Fri Jun 18 23:37:13 2010
@@ -29,6 +29,7 @@ import java.util.TreeSet;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
@@ -56,6 +57,7 @@ import static org.apache.hadoop.hdfs.ser
  *      and removes the lease once all files have been removed
  * 2.10) Namenode commit changes to edit log
  */
+@InterfaceAudience.Private
 public class LeaseManager {
   public static final Log LOG = LogFactory.getLog(LeaseManager.class);
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.HftpFileSystem;
@@ -45,6 +46,7 @@ import javax.servlet.http.HttpServletRes
  * Obtain meta-information about a filesystem.
  * @see org.apache.hadoop.hdfs.HftpFileSystem
  */
+@InterfaceAudience.Private
 public class ListPathsServlet extends DfsServlet {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri Jun 18 23:37:13 2010
@@ -28,6 +28,7 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.CreateFlag;
@@ -129,6 +130,7 @@ import org.apache.hadoop.util.StringUtil
  * secondary namenodes or rebalancing processes to get partial namenode's
  * state, for example partial blocksMap etc.
  **********************************************************/
+@InterfaceAudience.Private
 public class NameNode implements NamenodeProtocols, FSConstants {
   static{
     Configuration.addDefaultResource("hdfs-default.xml");

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java Fri Jun 18 23:37:13 2010
@@ -31,6 +31,7 @@ import java.util.TreeSet;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -70,6 +71,7 @@ import org.apache.hadoop.security.Access
  *  optionally can print detailed statistics on block locations and replication
  *  factors of each file.
  */
+@InterfaceAudience.Private
 public class NamenodeFsck {
   public static final Log LOG = LogFactory.getLog(NameNode.class.getName());
   

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NotReplicatedYetException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NotReplicatedYetException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NotReplicatedYetException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NotReplicatedYetException.java Fri Jun 18 23:37:13 2010
@@ -20,9 +20,14 @@ package org.apache.hadoop.hdfs.server.na
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * The file has not finished being written to enough datanodes yet.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class NotReplicatedYetException extends IOException {
   private static final long serialVersionUID = 1L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SafeModeException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SafeModeException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SafeModeException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SafeModeException.java Fri Jun 18 23:37:13 2010
@@ -20,11 +20,16 @@ package org.apache.hadoop.hdfs.server.na
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * This exception is thrown when the name node is in safe mode.
 * Clients cannot modify the namespace until safe mode is off.
  * 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class SafeModeException extends IOException {
   private static final long serialVersionUID = 1L;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Fri Jun 18 23:37:13 2010
@@ -28,6 +28,7 @@ import java.util.Iterator;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSUtil.ErrorSimulator;
@@ -63,6 +64,7 @@ import org.apache.hadoop.util.StringUtil
  *
  **********************************************************/
 @Deprecated // use BackupNode with -checkpoint argument instead.
+@InterfaceAudience.Private
 public class SecondaryNameNode implements Runnable {
     
   public static final Log LOG = 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java Fri Jun 18 23:37:13 2010
@@ -27,15 +27,17 @@ import java.util.List;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.mortbay.jetty.InclusiveByteRange;
 
+@InterfaceAudience.Private
 public class StreamFile extends DfsServlet {
   /** for java.io.Serializable */
   private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java Fri Jun 18 23:37:13 2010
@@ -20,9 +20,14 @@ package org.apache.hadoop.hdfs.server.na
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * This exception is thrown when an operation is not supported.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class UnsupportedActionException extends IOException {
   /** for java.io.Serializable */
   private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UpgradeObjectNamenode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UpgradeObjectNamenode.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UpgradeObjectNamenode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/UpgradeObjectNamenode.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.na
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.UpgradeObject;
 import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
@@ -27,6 +28,7 @@ import org.apache.hadoop.hdfs.server.pro
  * Base class for name-node upgrade objects.
  * Data-node upgrades are run in separate threads.
  */
+@InterfaceAudience.Private
 public abstract class UpgradeObjectNamenode extends UpgradeObject {
 
   /**

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.metrics;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * 
  * This Interface defines the methods to get the status of the FSNamesystem of
@@ -33,6 +35,7 @@ package org.apache.hadoop.hdfs.server.na
  * @see org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeActivityMBean
  *
  */
+@InterfaceAudience.Private
 public interface FSNamesystemMBean {
 
   /**

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMetrics.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMetrics.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMetrics.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.na
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -42,6 +43,7 @@ import org.apache.hadoop.metrics.util.Me
  *  <p> {@link #filesTotal}.set()
  *
  */
+@InterfaceAudience.Private
 public class FSNamesystemMetrics implements Updater {
   private static Log log = LogFactory.getLog(FSNamesystemMetrics.class);
   private final MetricsRecord metricsRecord;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeActivityMBean.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeActivityMBean.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeActivityMBean.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeActivityMBean.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.na
 
 import javax.management.ObjectName;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.util.MBeanUtil;
 import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
 import org.apache.hadoop.metrics.util.MetricsRegistry;
@@ -52,6 +53,7 @@ import org.apache.hadoop.metrics.util.Me
  * from the metrics registry passed as an argument to the constructor
  */
 
+@InterfaceAudience.Private
 public class NameNodeActivityMBean extends MetricsDynamicMBeanBase {
   final private ObjectName mbeanName;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.na
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -42,6 +43,7 @@ import org.apache.hadoop.metrics.util.Me
  *  <p> {@link #syncs}.inc()
  *
  */
+@InterfaceAudience.Private
 public class NameNodeMetrics implements Updater {
     private static Log log = LogFactory.getLog(NameNodeMetrics.class);
     private final MetricsRecord metricsRecord;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockCommand.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockCommand.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockCommand.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.hdfs.server.pr
 import java.io.*;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.server.namenode.DatanodeDescriptor.BlockTargetPair;
@@ -34,6 +36,8 @@ import org.apache.hadoop.io.*;
  * another DataNode.
  * 
  ****************************************************/
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class BlockCommand extends DatanodeCommand {
   Block blocks[];
   DatanodeInfo targets[][];

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlockRecoveryCommand.java Fri Jun 18 23:37:13 2010
@@ -23,6 +23,8 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.ArrayList;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
@@ -40,6 +42,8 @@ import org.apache.hadoop.io.WritableFact
  * Block recovery is identified by a recoveryId, which is also the new
  * generation stamp, which the block will have after the recovery succeeds.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class BlockRecoveryCommand extends DatanodeCommand {
   Collection<RecoveringBlock> recoveringBlocks;
 
@@ -50,6 +54,8 @@ public class BlockRecoveryCommand extend
    * 
    * The new generation stamp of the block, also plays role of the recovery id.
    */
+  @InterfaceAudience.Private
+  @InterfaceStability.Evolving
   public static class RecoveringBlock extends LocatedBlock {
     private long newGenerationStamp;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlocksWithLocations.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlocksWithLocations.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlocksWithLocations.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/BlocksWithLocations.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -30,11 +32,15 @@ import org.apache.hadoop.io.WritableUtil
  *  It provides efficient customized serialization/deserialization methods
  *  instead of using the default array (de)serialization provided by RPC
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class BlocksWithLocations implements Writable {
 
   /**
    * A class to keep track of a block and its locations
    */
+  @InterfaceAudience.Private
+  @InterfaceStability.Evolving
   public static class BlockWithLocations  implements Writable {
     Block block;
     String datanodeIDs[];

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/CheckpointCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/CheckpointCommand.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/CheckpointCommand.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/CheckpointCommand.java Fri Jun 18 23:37:13 2010
@@ -24,6 +24,8 @@ import java.io.IOException;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature;
 
 /**
@@ -41,6 +43,8 @@ import org.apache.hadoop.hdfs.server.nam
  * upon completion of the checkpoint.</li>
  * </ul>
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class CheckpointCommand extends NamenodeCommand {
   private CheckpointSignature cSig;
   private boolean isImageObsolete;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeCommand.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,9 @@ package org.apache.hadoop.hdfs.server.pr
 
 import java.io.DataInput;
 import java.io.DataOutput;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactory;
 import org.apache.hadoop.io.WritableFactories;
@@ -35,6 +38,8 @@ import org.apache.avro.reflect.Union;
       BlockCommand.class, UpgradeCommand.class,
       BlockRecoveryCommand.class, KeyUpdateCommand.class})
 
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public abstract class DatanodeCommand extends ServerCommand {
   static class Register extends DatanodeCommand {
     private Register() {super(DatanodeProtocol.DNA_REGISTER);}

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.pr
 
 import java.io.*;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -40,6 +41,7 @@ import org.apache.avro.reflect.Nullable;
 @KerberosInfo(
     serverPrincipal = DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, 
     clientPrincipal = DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY)
+@InterfaceAudience.Private
 public interface DatanodeProtocol extends VersionedProtocol {
   /**
    * 24: register() renamed registerDatanode()

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
 import org.apache.hadoop.hdfs.server.common.Storage;
@@ -36,6 +38,8 @@ import org.apache.hadoop.io.WritableFact
  * to identify and verify a data-node when it contacts the name-node.
  * This information is sent by data-node with each communication request.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class DatanodeRegistration extends DatanodeID
 implements Writable, NodeRegistration {
   static {                                      // register a ctor

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DisallowedDatanodeException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DisallowedDatanodeException.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DisallowedDatanodeException.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/DisallowedDatanodeException.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.hdfs.server.pr
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 
 
@@ -29,6 +31,8 @@ import org.apache.hadoop.hdfs.protocol.D
  * or has been specifically excluded.
  * 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class DisallowedDatanodeException extends IOException {
   /** for java.io.Serializable */
   private static final long serialVersionUID = 1L;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/InterDatanodeProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/InterDatanodeProtocol.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/InterDatanodeProtocol.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/InterDatanodeProtocol.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,7 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
@@ -33,6 +34,7 @@ import org.apache.hadoop.security.Kerber
 @KerberosInfo(
     serverPrincipal = DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY,
     clientPrincipal = DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY)
+@InterfaceAudience.Private
 public interface InterDatanodeProtocol extends VersionedProtocol {
   public static final Log LOG = LogFactory.getLog(InterDatanodeProtocol.class);
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/KeyUpdateCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/KeyUpdateCommand.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/KeyUpdateCommand.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/KeyUpdateCommand.java Fri Jun 18 23:37:13 2010
@@ -21,11 +21,15 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
 
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class KeyUpdateCommand extends DatanodeCommand {
   private ExportedBlockKeys keys;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeCommand.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeCommand.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeCommand.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.protocol;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
@@ -25,6 +27,8 @@ import org.apache.hadoop.io.WritableFact
  * Base class for name-node command.
  * Issued by the name-node to notify other name-nodes what should be done.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class NamenodeCommand extends ServerCommand {
   static {
     WritableFactories.setFactory(NamenodeCommand.class,

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocol.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocol.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocol.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.pr
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
@@ -34,6 +35,7 @@ import org.apache.hadoop.security.Kerber
 @KerberosInfo(
     serverPrincipal = DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
     clientPrincipal = DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY)
+@InterfaceAudience.Private
 public interface NamenodeProtocol extends VersionedProtocol {
   /**
    * Compared to the previous version the following changes have been introduced:

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java Fri Jun 18 23:37:13 2010
@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.hdfs.server.protocol;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 
 /** The full set of RPC methods implemented by the Namenode.  */
+@InterfaceAudience.Private
 public interface NamenodeProtocols
   extends ClientProtocol,
           DatanodeProtocol,

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeRegistration.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeRegistration.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeRegistration.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeRegistration.java Fri Jun 18 23:37:13 2010
@@ -26,6 +26,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.NamenodeRole;
@@ -34,6 +36,8 @@ import org.apache.hadoop.hdfs.server.com
  * Information sent by a subordinate name-node to the active name-node
  * during the registration process. 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class NamenodeRegistration extends StorageInfo
 implements NodeRegistration {
   String rpcAddress;          // RPC address of the node

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
@@ -35,6 +37,8 @@ import org.apache.hadoop.io.WritableFact
  * to a data-node handshake.
  * 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class NamespaceInfo extends StorageInfo {
   String  buildVersion;
   int distributedUpgradeVersion;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NodeRegistration.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NodeRegistration.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NodeRegistration.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/NodeRegistration.java Fri Jun 18 23:37:13 2010
@@ -18,10 +18,13 @@
 
 package org.apache.hadoop.hdfs.server.protocol;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Generic class specifying information which needs to be sent to the name-node
  * during the registration process. 
  */
+@InterfaceAudience.Private
 public interface NodeRegistration {
   /**
    * Get address of the server node.

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ReplicaRecoveryInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ReplicaRecoveryInfo.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ReplicaRecoveryInfo.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ReplicaRecoveryInfo.java Fri Jun 18 23:37:13 2010
@@ -22,6 +22,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.ReplicaState;
 import org.apache.hadoop.io.Writable;
@@ -31,6 +33,8 @@ import org.apache.hadoop.io.WritableFact
 /**
  * Replica recovery information.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class ReplicaRecoveryInfo extends Block {
   private ReplicaState originalState;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java Fri Jun 18 23:37:13 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.hdfs.server.pr
 
 import java.io.*;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 
 /**
@@ -29,6 +31,8 @@ import org.apache.hadoop.io.Writable;
  * @see DatanodeProtocol
  * @see NamenodeProtocol
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public abstract class ServerCommand implements Writable {
   private int action;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/UpgradeCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/UpgradeCommand.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/UpgradeCommand.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/protocol/UpgradeCommand.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
@@ -35,6 +37,8 @@ import org.apache.hadoop.io.WritableFact
  * The upgrade command contains version of the upgrade, which is verified 
  * on the receiving side and current status of the upgrade.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class UpgradeCommand extends DatanodeCommand {
   final static int UC_ACTION_UNKNOWN = DatanodeProtocol.DNA_UNKNOWN;
   public final static int UC_ACTION_REPORT_STATUS = 100; // report upgrade status

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java Fri Jun 18 23:37:13 2010
@@ -24,6 +24,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.TreeSet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
@@ -53,6 +54,7 @@ import org.apache.hadoop.util.ToolRunner
 /**
  * This class provides some DFS administrative access.
  */
+@InterfaceAudience.Private
 public class DFSAdmin extends FsShell {
 
   static{

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java Fri Jun 18 23:37:13 2010
@@ -25,6 +25,7 @@ import java.net.URL;
 import java.net.URLConnection;
 import java.net.URLEncoder;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hdfs.server.namenode.NamenodeFsck;
@@ -58,6 +59,7 @@ import org.apache.hadoop.util.ToolRunner
  *  The tool also provides and option to filter open files during the scan.
  *  
  */
+@InterfaceAudience.Private
 public class DFSck extends Configured implements Tool {
   static{
     Configuration.addDefaultResource("hdfs-default.xml");

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java Fri Jun 18 23:37:13 2010
@@ -27,6 +27,7 @@ import java.net.URL;
 import java.net.URLConnection;
 import java.security.PrivilegedExceptionAction;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -44,6 +45,7 @@ import org.apache.hadoop.util.StringUtil
  * Fetch a DelegationToken from the current Namenode and store it in the
  * specified file.
  */
+@InterfaceAudience.Private
 public class DelegationTokenFetcher {
   private static final String USAGE =
     "fetchdt retrieves delegation tokens (optionally over http)\n" +

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/HDFSConcat.java Fri Jun 18 23:37:13 2010
@@ -20,12 +20,14 @@ package org.apache.hadoop.hdfs.tools;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 
 
+@InterfaceAudience.Private
 public class HDFSConcat {
   private final static String def_uri = "hdfs://localhost:9000";
   /**

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java Fri Jun 18 23:37:13 2010
@@ -42,6 +42,8 @@ import org.apache.commons.cli.OptionBuil
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * tool to get data from NameNode or DataNode using MBeans currently the
  * following MBeans are available (under hadoop domain):
@@ -58,6 +60,7 @@ import org.apache.commons.cli.ParseExcep
  * implementation note: all logging is sent to System.err (since it is a command
  * line tool)
  */
+@InterfaceAudience.Private
 public class JMXGet {
 
   private static final String format = "%s=%s\n";

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java Fri Jun 18 23:37:13 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.hdfs.tools.off
 import java.io.DataInputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * An ImageLoader can accept a DataInputStream to an Hadoop FSImage file
  * and walk over its structure using the supplied ImageVisitor.
@@ -54,6 +56,7 @@ interface ImageLoader {
    * Factory for obtaining version of image loader that can read
    * a particular image format.
    */
+  @InterfaceAudience.Private
   public class LoaderFactory {
     // Java doesn't support static methods on interfaces, which necessitates
     // this factory class

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/NameDistributionVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/NameDistributionVisitor.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/NameDistributionVisitor.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/NameDistributionVisitor.java Fri Jun 18 23:37:13 2010
@@ -21,6 +21,8 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * File name distribution visitor. 
  * <p>
@@ -30,6 +32,7 @@ import java.util.Map.Entry;
  * these same names</li>
  * <li>Heap saved if the file name objects are reused</li>
  */
+@InterfaceAudience.Private
 public class NameDistributionVisitor extends TextWriterImageVisitor {
   HashMap<String, Integer> counts = new HashMap<String, Integer>();
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java Fri Jun 18 23:37:13 2010
@@ -31,11 +31,14 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * OfflineImageViewer to dump the contents of an Hadoop image file to XML
  * or the console.  Main entry point into utility, either via the
  * command line or programatically.
  */
+@InterfaceAudience.Private
 public class OfflineImageViewer {
   private final static String usage = 
     "Usage: bin/hdfs oiv [OPTIONS] -i INPUTFILE -o OUTPUTFILE\n" +

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/ByteArray.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/ByteArray.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/ByteArray.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/ByteArray.java Fri Jun 18 23:37:13 2010
@@ -19,9 +19,12 @@ package org.apache.hadoop.hdfs.util;
 
 import java.util.Arrays;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /** 
  * Wrapper for byte[] to use byte[] as key in HashMap
  */
+@InterfaceAudience.Private
 public class ByteArray {
   private int hash = 0; // cache the hash code
   private final byte[] bytes;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSet.java Fri Jun 18 23:37:13 2010
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs.util;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A {@link GSet} is set,
  * which supports the {@link #get(Object)} operation.
@@ -27,6 +29,7 @@ package org.apache.hadoop.hdfs.util;
  * @param <K> The type of the keys.
  * @param <E> The type of the elements, which must be a subclass of the keys.
  */
+@InterfaceAudience.Private
 public interface GSet<K, E extends K> extends Iterable<E> {
   /**
    * @return The size of this set.
@@ -78,4 +81,4 @@ public interface GSet<K, E extends K> ex
     * @throws NullPointerException if key == null.
   */
   E remove(K key);
-}
\ No newline at end of file
+}

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java Fri Jun 18 23:37:13 2010
@@ -20,9 +20,12 @@ package org.apache.hadoop.hdfs.util;
 import java.util.HashMap;
 import java.util.Iterator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A {@link GSet} implementation by {@link HashMap}.
  */
+@InterfaceAudience.Private
 public class GSetByHashMap<K, E extends K> implements GSet<K, E> {
   private final HashMap<K, E> m;
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java?rev=956155&r1=956154&r2=956155&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java Fri Jun 18 23:37:13 2010
@@ -23,6 +23,7 @@ import java.util.Iterator;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 
 /**
@@ -42,6 +43,7 @@ import org.apache.hadoop.HadoopIllegalAr
  *       (1) a subclass of K, and
  *       (2) implementing {@link LinkedElement} interface.
  */
+@InterfaceAudience.Private
 public class LightWeightGSet<K, E extends K> implements GSet<K, E> {
   /**
    * Elements of {@link LightWeightGSet}.