You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by to...@apache.org on 2010/06/11 23:35:05 UTC

svn commit: r953881 [3/5] - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/ src/java/org/apache/hadoop/classification/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/ftp/ src/java/org/apache/hadoop...

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/WritableName.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/WritableName.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/WritableName.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/WritableName.java Fri Jun 11 21:34:56 2010
@@ -21,11 +21,15 @@ package org.apache.hadoop.io;
 import java.util.HashMap;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
 /** Utility to permit renaming of Writable implementation classes without
 * invalidating files that contain their class name.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class WritableName {
   private static HashMap<String, Class<?>> NAME_TO_CLASS =
     new HashMap<String, Class<?>>();

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/WritableUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/WritableUtils.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/WritableUtils.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/WritableUtils.java Fri Jun 11 21:34:56 2010
@@ -20,12 +20,16 @@ package org.apache.hadoop.io;
 
 import java.io.*;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public final class WritableUtils  {
 
   public static byte[] readCompressedByteArray(DataInput in) throws IOException {

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BZip2Codec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BZip2Codec.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BZip2Codec.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BZip2Codec.java Fri Jun 11 21:34:56 2010
@@ -24,6 +24,8 @@ import java.io.InputStream;
 import java.io.OutputStream;
 
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Seekable;
 import org.apache.hadoop.io.compress.bzip2.BZip2Constants;
 import org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor;
@@ -38,6 +40,8 @@ import org.apache.hadoop.io.compress.bzi
  * CompressionCodec which have a Compressor or Decompressor type argument, throw
  * UnsupportedOperationException.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class BZip2Codec implements SplittableCompressionCodec {
 
   private static final String HEADER = "BZ";

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BlockCompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BlockCompressorStream.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BlockCompressorStream.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BlockCompressorStream.java Fri Jun 11 21:34:56 2010
@@ -21,6 +21,9 @@ package org.apache.hadoop.io.compress;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * A {@link org.apache.hadoop.io.compress.CompressorStream} which works
 * with 'block-based' compression algorithms, as opposed to 
@@ -31,6 +34,8 @@ import java.io.OutputStream;
  * {@link org.apache.hadoop.io.compress.Compressor} requires buffering to
  * effect meaningful compression, it is responsible for it.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class BlockCompressorStream extends CompressorStream {
 
   // The 'maximum' size of input data to be compressed, to account

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java Fri Jun 11 21:34:56 2010
@@ -22,12 +22,17 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * A {@link org.apache.hadoop.io.compress.DecompressorStream} which works
 * with 'block-based' compression algorithms, as opposed to 
  * 'stream-based' compression algorithms.
  *  
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class BlockDecompressorStream extends DecompressorStream {
   private int originalBlockSize = 0;
   private int noUncompressedBytes = 0;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java Fri Jun 11 21:34:56 2010
@@ -24,6 +24,8 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -31,6 +33,8 @@ import org.apache.hadoop.util.Reflection
  * A global compressor/decompressor pool used to save and reuse 
  * (possibly native) compression/decompression codecs.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class CodecPool {
   private static final Log LOG = LogFactory.getLog(CodecPool.class);
   

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodec.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodec.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodec.java Fri Jun 11 21:34:56 2010
@@ -22,9 +22,14 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * This class encapsulates a streaming compression/decompression pair.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface CompressionCodec {
 
   /**

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java Fri Jun 11 21:34:56 2010
@@ -21,6 +21,8 @@ import java.util.*;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -28,6 +30,8 @@ import org.apache.hadoop.util.Reflection
 /**
  * A factory that will find the correct codec for a given filename.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class CompressionCodecFactory {
 
   public static final Log LOG =

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java Fri Jun 11 21:34:56 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.io.compress;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.PositionedReadable;
 import org.apache.hadoop.fs.Seekable;
 /**
@@ -30,7 +32,8 @@ import org.apache.hadoop.fs.Seekable;
  * reposition the underlying input stream then call {@link #resetState()},
  * without having to also synchronize client buffers.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class CompressionInputStream extends InputStream implements Seekable {
   /**
    * The input stream to be compressed. 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionOutputStream.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionOutputStream.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressionOutputStream.java Fri Jun 11 21:34:56 2010
@@ -21,9 +21,14 @@ package org.apache.hadoop.io.compress;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * A compression output stream.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class CompressionOutputStream extends OutputStream {
   /**
    * The output stream to be compressed. 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/Compressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/Compressor.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/Compressor.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/Compressor.java Fri Jun 11 21:34:56 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.io.compress;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -28,6 +30,8 @@ import org.apache.hadoop.conf.Configurat
  * This is modelled after {@link java.util.zip.Deflater}
  * 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface Compressor {
   /**
    * Sets input data for compression. 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java Fri Jun 11 21:34:56 2010
@@ -21,9 +21,13 @@ package org.apache.hadoop.io.compress;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.Compressor;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class CompressorStream extends CompressionOutputStream {
   protected Compressor compressor;
   protected byte[] buffer;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/Decompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/Decompressor.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/Decompressor.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/Decompressor.java Fri Jun 11 21:34:56 2010
@@ -20,12 +20,17 @@ package org.apache.hadoop.io.compress;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
 * Specification of a stream-based 'de-compressor' which can be  
 * plugged into a {@link CompressionInputStream} to decompress data.
  * This is modelled after {@link java.util.zip.Inflater}
  * 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface Decompressor {
   /**
    * Sets input data for decompression. 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DecompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DecompressorStream.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DecompressorStream.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DecompressorStream.java Fri Jun 11 21:34:56 2010
@@ -22,8 +22,12 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.compress.Decompressor;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class DecompressorStream extends CompressionInputStream {
   protected Decompressor decompressor = null;
   protected byte[] buffer;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java Fri Jun 11 21:34:56 2010
@@ -22,10 +22,14 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.io.InputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.zlib.*;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class DefaultCodec implements Configurable, CompressionCodec {
   
   Configuration conf;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/GzipCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/GzipCodec.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/GzipCodec.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/GzipCodec.java Fri Jun 11 21:34:56 2010
@@ -22,6 +22,8 @@ import java.io.*;
 import java.util.zip.GZIPOutputStream;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.io.compress.zlib.*;
@@ -31,11 +33,14 @@ import org.apache.hadoop.io.compress.zli
 /**
  * This class creates gzip compressors/decompressors. 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class GzipCodec extends DefaultCodec {
   /**
    * A bridge that wraps around a DeflaterOutputStream to make it 
    * a CompressionOutputStream.
    */
+  @InterfaceStability.Evolving
   protected static class GzipOutputStream extends CompressorStream {
 
     private static class ResetableGZIPOutputStream extends GZIPOutputStream {
@@ -87,6 +92,7 @@ public class GzipCodec extends DefaultCo
     }
   }
   
+  @InterfaceStability.Evolving
   protected static class GzipInputStream extends DecompressorStream {
     
     private static class ResetableGZIPInputStream extends GZIPInputStream {

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplitCompressionInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplitCompressionInputStream.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplitCompressionInputStream.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplitCompressionInputStream.java Fri Jun 11 21:34:56 2010
@@ -20,11 +20,16 @@ package org.apache.hadoop.io.compress;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * An InputStream covering a range of compressed data. The start and end
  * offsets requested by a client may be modified by the codec to fit block
  * boundaries or other algorithm-dependent requirements.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class SplitCompressionInputStream
     extends CompressionInputStream {
 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java Fri Jun 11 21:34:56 2010
@@ -21,6 +21,9 @@ package org.apache.hadoop.io.compress;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 
 /**
  * This interface is meant to be implemented by those compression codecs
@@ -40,6 +43,8 @@ import java.io.InputStream;
  * an input compressed file can be split and hence can be worked on by multiple
  * machines in parallel.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface SplittableCompressionCodec extends CompressionCodec {
 
   /**

Added: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/bzip2/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/bzip2/package-info.java?rev=953881&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/bzip2/package-info.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/bzip2/package-info.java Fri Jun 11 21:34:56 2010
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.io.compress.bzip2;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+

Propchange: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/bzip2/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/package-info.java?rev=953881&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/package-info.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/package-info.java Fri Jun 11 21:34:56 2010
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.io.compress.zlib;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+

Propchange: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/ByteArray.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/ByteArray.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/ByteArray.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/ByteArray.java Fri Jun 11 21:34:56 2010
@@ -17,12 +17,16 @@
 
 package org.apache.hadoop.io.file.tfile;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.BytesWritable;
 
 /**
  * Adaptor class to wrap byte-array backed objects (including java byte array)
  * as RawComparable objects.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
 public final class ByteArray implements RawComparable {
   private final byte[] buffer;
   private final int offset;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/MetaBlockAlreadyExists.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/MetaBlockAlreadyExists.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/MetaBlockAlreadyExists.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/MetaBlockAlreadyExists.java Fri Jun 11 21:34:56 2010
@@ -19,10 +19,15 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Exception - Meta Block with the same name already exists.
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class MetaBlockAlreadyExists extends IOException {
   /**
    * Constructor

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/MetaBlockDoesNotExist.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/MetaBlockDoesNotExist.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/MetaBlockDoesNotExist.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/MetaBlockDoesNotExist.java Fri Jun 11 21:34:56 2010
@@ -19,10 +19,15 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Exception - No such Meta Block with the given name.
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class MetaBlockDoesNotExist extends IOException {
   /**
    * Constructor

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/RawComparable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/RawComparable.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/RawComparable.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/RawComparable.java Fri Jun 11 21:34:56 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.io.file.tfile;
 import java.util.Collections;
 import java.util.Comparator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.RawComparator;
 
 /**
@@ -33,6 +35,8 @@ import org.apache.hadoop.io.RawComparato
  * RawComparable are supposed to be semantically comparable with the same
  * RawComparator.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface RawComparable {
   /**
    * Get the underlying byte array.

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/TFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/TFile.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/TFile.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/TFile.java Fri Jun 11 21:34:56 2010
@@ -31,6 +31,8 @@ import java.util.Comparator;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -126,6 +128,8 @@ import org.apache.hadoop.io.serializer.J
  * Some design rationale behind TFile can be found at <a
  * href=https://issues.apache.org/jira/browse/HADOOP-3315>Hadoop-3315</a>.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class TFile {
   static final Log LOG = LogFactory.getLog(TFile.class);
 
@@ -198,6 +202,7 @@ public class TFile {
   /**
    * TFile Writer.
    */
+  @InterfaceStability.Evolving
   public static class Writer implements Closeable {
     // minimum compressed size for a block.
     private final int sizeMinBlock;
@@ -674,6 +679,7 @@ public class TFile {
    * {@link Reader#createScannerByKey(byte[], byte[])} or
    * {@link Reader#createScannerByKey(RawComparable, RawComparable)}).
    */
+  @InterfaceStability.Evolving
   public static class Reader implements Closeable {
     // The underlying BCFile reader.
     final BCFile.Reader readerBCF;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Utils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Utils.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Utils.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Utils.java Fri Jun 11 21:34:56 2010
@@ -23,11 +23,15 @@ import java.io.IOException;
 import java.util.Comparator;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
 
 /**
  * Supporting Utility classes used by TFile, and shared by users of TFile.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public final class Utils {
 
   /**

Added: hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/package-info.java?rev=953881&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/package-info.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/package-info.java Fri Jun 11 21:34:56 2010
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+package org.apache.hadoop.io.retry;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

Propchange: hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Deserializer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Deserializer.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Deserializer.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Deserializer.java Fri Jun 11 21:34:56 2010
@@ -21,6 +21,9 @@ package org.apache.hadoop.io.serializer;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * <p>
  * Provides a facility for deserializing objects of type <T> from an
@@ -34,6 +37,8 @@ import java.io.InputStream;
  * </p>
  * @param <T>
  */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public interface Deserializer<T> {
   /**
    * <p>Prepare the deserializer for reading.</p>

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java Fri Jun 11 21:34:56 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.io.serializer;
 import java.io.IOException;
 import java.util.Comparator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.InputBuffer;
 import org.apache.hadoop.io.RawComparator;
 
@@ -37,6 +39,8 @@ import org.apache.hadoop.io.RawComparato
  * </p>
  * @param <T>
  */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public abstract class DeserializerComparator<T> implements RawComparator<T> {
   
   private InputBuffer buffer = new InputBuffer();

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java Fri Jun 11 21:34:56 2010
@@ -26,6 +26,8 @@ import java.io.OutputStream;
 import java.io.Serializable;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.RawComparator;
 
 /**
@@ -34,6 +36,8 @@ import org.apache.hadoop.io.RawComparato
  * </p>
  * @see JavaSerializationComparator
  */
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
 public class JavaSerialization implements Serialization<Serializable> {
 
   static class JavaSerializationDeserializer<T extends Serializable>
@@ -89,14 +93,17 @@ public class JavaSerialization implement
 
   }
 
+  @InterfaceAudience.Private
   public boolean accept(Class<?> c) {
     return Serializable.class.isAssignableFrom(c);
   }
 
+  @InterfaceAudience.Private
   public Deserializer<Serializable> getDeserializer(Class<Serializable> c) {
     return new JavaSerializationDeserializer<Serializable>();
   }
 
+  @InterfaceAudience.Private
   public Serializer<Serializable> getSerializer(Class<Serializable> c) {
     return new JavaSerializationSerializer();
   }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java Fri Jun 11 21:34:56 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.io.serializer;
 import java.io.IOException;
 import java.io.Serializable;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.RawComparator;
 
 /**
@@ -32,13 +34,17 @@ import org.apache.hadoop.io.RawComparato
  * @param <T>
  * @see JavaSerialization
  */
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
 public class JavaSerializationComparator<T extends Serializable&Comparable<T>>
   extends DeserializerComparator<T> {
 
+  @InterfaceAudience.Private
   public JavaSerializationComparator() throws IOException {
     super(new JavaSerialization.JavaSerializationDeserializer<T>());
   }
 
+  @InterfaceAudience.Private
   public int compare(T o1, T o2) {
     return o1.compareTo(o2);
   }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Serialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Serialization.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Serialization.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Serialization.java Fri Jun 11 21:34:56 2010
@@ -18,12 +18,17 @@
 
 package org.apache.hadoop.io.serializer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * <p>
  * Encapsulates a {@link Serializer}/{@link Deserializer} pair.
  * </p>
  * @param <T>
  */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public interface Serialization<T> {
   
   /**

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java Fri Jun 11 21:34:56 2010
@@ -23,6 +23,8 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization;
@@ -35,6 +37,8 @@ import org.apache.hadoop.util.StringUtil
  * A factory for {@link Serialization}s.
  * </p>
  */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public class SerializationFactory extends Configured {
   
   private static final Log LOG =

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Serializer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Serializer.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Serializer.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/Serializer.java Fri Jun 11 21:34:56 2010
@@ -21,6 +21,9 @@ package org.apache.hadoop.io.serializer;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * <p>
  * Provides a facility for serializing objects of type <T> to an
@@ -34,7 +37,8 @@ import java.io.OutputStream;
  * </p>
  * @param <T>
  */
-@Deprecated
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public interface Serializer<T> {
   /**
    * <p>Prepare the serializer for writing.</p>

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java Fri Jun 11 21:34:56 2010
@@ -25,6 +25,8 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Writable;
@@ -35,6 +37,8 @@ import org.apache.hadoop.util.Reflection
  * {@link Writable#write(java.io.DataOutput)} and
  * {@link Writable#readFields(java.io.DataInput)}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class WritableSerialization extends Configured
 	implements Serialization<Writable> {
   static class WritableDeserializer extends Configured
@@ -103,16 +107,19 @@ public class WritableSerialization exten
 
   }
 
+  @InterfaceAudience.Private
   @Override
   public boolean accept(Class<?> c) {
     return Writable.class.isAssignableFrom(c);
   }
 
+  @InterfaceAudience.Private
   @Override
   public Serializer<Writable> getSerializer(Class<Writable> c) {
     return new WritableSerializer();
   }
   
+  @InterfaceAudience.Private
   @Override
   public Deserializer<Writable> getDeserializer(Class<Writable> c) {
     return new WritableDeserializer(getConf(), c);

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerializable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerializable.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerializable.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerializable.java Fri Jun 11 21:34:56 2010
@@ -18,11 +18,16 @@
 
 package org.apache.hadoop.io.serializer.avro;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Tag interface for Avro 'reflect' serializable classes. Classes implementing 
  * this interface can be serialized/deserialized using 
  * {@link AvroReflectSerialization}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface AvroReflectSerializable {
 
 }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java Fri Jun 11 21:34:56 2010
@@ -27,15 +27,19 @@ import org.apache.avro.io.DatumWriter;
 import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.reflect.ReflectDatumReader;
 import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 
 /**
  * Serialization for Avro Reflect classes. For a class to be accepted by this 
  * serialization, it must either be in the package list configured via 
- * {@link AvroReflectSerialization#AVRO_REFLECT_PACKAGES} or implement 
+ * <code>avro.reflect.pkgs</code> or implement 
  * {@link AvroReflectSerializable} interface.
  *
  */
 @SuppressWarnings("unchecked")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class AvroReflectSerialization extends AvroSerialization<Object>{
 
   /**
@@ -43,10 +47,12 @@ public class AvroReflectSerialization ex
    * deserialized using this class. Multiple packages can be specified using 
    * comma-separated list.
    */
+  @InterfaceAudience.Private
   public static final String AVRO_REFLECT_PACKAGES = "avro.reflect.pkgs";
 
   private Set<String> packages; 
 
+  @InterfaceAudience.Private
   @Override
   public synchronized boolean accept(Class<?> c) {
     if (packages == null) {
@@ -66,6 +72,7 @@ public class AvroReflectSerialization ex
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   public DatumReader getReader(Class<Object> clazz) {
     try {
@@ -75,11 +82,13 @@ public class AvroReflectSerialization ex
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   public Schema getSchema(Object t) {
     return ReflectData.get().getSchema(t.getClass());
   }
 
+  @InterfaceAudience.Private
   @Override
   public DatumWriter getWriter(Class<Object> clazz) {
     return new ReflectDatumWriter();

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java Fri Jun 11 21:34:56 2010
@@ -28,6 +28,8 @@ import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DatumReader;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.DecoderFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.Serialization;
@@ -36,15 +38,20 @@ import org.apache.hadoop.io.serializer.S
 /**
  * Base class for providing serialization to Avro types.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class AvroSerialization<T> extends Configured 
 	implements Serialization<T>{
   
+  @InterfaceAudience.Private
   public static final String AVRO_SCHEMA_KEY = "Avro-Schema";
 
+  @InterfaceAudience.Private
   public Deserializer<T> getDeserializer(Class<T> c) {
     return new AvroDeserializer(c);
   }
 
+  @InterfaceAudience.Private
   public Serializer<T> getSerializer(Class<T> c) {
     return new AvroSerializer(c);
   }
@@ -52,16 +59,19 @@ public abstract class AvroSerialization<
   /**
    * Return an Avro Schema instance for the given class.
    */
+  @InterfaceAudience.Private
   public abstract Schema getSchema(T t);
 
   /**
    * Create and return Avro DatumWriter for the given class.
    */
+  @InterfaceAudience.Private
   public abstract DatumWriter<T> getWriter(Class<T> clazz);
 
   /**
    * Create and return Avro DatumReader for the given class.
    */
+  @InterfaceAudience.Private
   public abstract DatumReader<T> getReader(Class<T> clazz);
 
   class AvroSerializer implements Serializer<T> {

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java Fri Jun 11 21:34:56 2010
@@ -24,20 +24,26 @@ import org.apache.avro.io.DatumWriter;
 import org.apache.avro.specific.SpecificDatumReader;
 import org.apache.avro.specific.SpecificDatumWriter;
 import org.apache.avro.specific.SpecificRecord;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 
 /**
  * Serialization for Avro Specific classes. This serialization is to be used 
  * for classes generated by Avro's 'specific' compiler.
  */
 @SuppressWarnings("unchecked")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class AvroSpecificSerialization 
                           extends AvroSerialization<SpecificRecord>{
 
+  @InterfaceAudience.Private
   @Override
   public boolean accept(Class<?> c) {
     return SpecificRecord.class.isAssignableFrom(c);
   }
 
+  @InterfaceAudience.Private
   @Override
   public DatumReader getReader(Class<SpecificRecord> clazz) {
     try {
@@ -47,11 +53,13 @@ public class AvroSpecificSerialization 
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   public Schema getSchema(SpecificRecord t) {
     return t.getSchema();
   }
 
+  @InterfaceAudience.Private
   @Override
   public DatumWriter getWriter(Class<SpecificRecord> clazz) {
     return new SpecificDatumWriter();

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcActivityMBean.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcActivityMBean.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcActivityMBean.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcActivityMBean.java Fri Jun 11 21:34:56 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.ipc.metrics;
 
 import javax.management.ObjectName;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.util.MBeanUtil;
 import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
 import org.apache.hadoop.metrics.util.MetricsRegistry;
@@ -53,7 +54,7 @@ import org.apache.hadoop.metrics.util.Me
  * Impl details: We use a dynamic mbean that gets the list of the metrics
  * from the metrics registry passed as an argument to the constructor
  */
-
+@InterfaceAudience.Private
 public class RpcActivityMBean extends MetricsDynamicMBeanBase {
   private final ObjectName mbeanName;
 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedActivityMBean.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedActivityMBean.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedActivityMBean.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedActivityMBean.java Fri Jun 11 21:34:56 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.ipc.metrics;
 
 import javax.management.ObjectName;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.util.MBeanUtil;
 import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
 import org.apache.hadoop.metrics.util.MetricsRegistry;
@@ -50,6 +51,7 @@ import org.apache.hadoop.metrics.util.Me
  * Impl details: We use a dynamic mbean that gets the list of the metrics from
  * the metrics registry passed as an argument to the constructor
  */
+@InterfaceAudience.Private
 public class RpcDetailedActivityMBean extends MetricsDynamicMBeanBase {
   private final ObjectName mbeanName;
 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java Fri Jun 11 21:34:56 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.ipc.metrics;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
@@ -33,6 +34,7 @@ import org.apache.hadoop.metrics.util.Me
  * and publishing them through the metrics interfaces.
  * This also registers the JMX MBean for RPC.
  */
+@InterfaceAudience.Private
 public class RpcDetailedMetrics implements Updater {
   public final MetricsRegistry registry = new MetricsRegistry();
   private final MetricsRecord metricsRecord;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java Fri Jun 11 21:34:56 2010
@@ -19,6 +19,7 @@ package org.apache.hadoop.ipc.metrics;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
@@ -43,6 +44,7 @@ import org.apache.hadoop.metrics.util.Me
  *  <p> {@link #rpcQueueTime}.inc(time)
  *
  */
+@InterfaceAudience.Private
 public class RpcMetrics implements Updater {
   private final MetricsRegistry registry = new MetricsRegistry();
   private final MetricsRecord metricsRecord;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgt.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgt.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgt.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgt.java Fri Jun 11 21:34:56 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.ipc.metrics;
 
 import javax.management.ObjectName;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.metrics.util.MBeanUtil;
 
@@ -28,6 +29,7 @@ import org.apache.hadoop.metrics.util.MB
  * This class implements the RpcMgt MBean
  *
  */
+@InterfaceAudience.Private
 class RpcMgt implements RpcMgtMBean {
   private RpcMetrics myMetrics;
   private Server myServer;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgtMBean.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgtMBean.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgtMBean.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/RpcMgtMBean.java Fri Jun 11 21:34:56 2010
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.ipc.metrics;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 
 /**
  * 
@@ -39,6 +41,7 @@ package org.apache.hadoop.ipc.metrics;
  * The context with the update thread is used to average the data periodically
  *
  */
+@InterfaceAudience.Private
 public interface RpcMgtMBean {
   
   /**

Added: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java?rev=953881&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java Fri Jun 11 21:34:56 2010
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+package org.apache.hadoop.ipc.metrics;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

Propchange: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/metrics/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/package-info.java?rev=953881&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/package-info.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/package-info.java Fri Jun 11 21:34:56 2010
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+package org.apache.hadoop.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

Propchange: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/log/LogLevel.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/log/LogLevel.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/log/LogLevel.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/log/LogLevel.java Fri Jun 11 21:34:56 2010
@@ -26,12 +26,15 @@ import javax.servlet.http.*;
 
 import org.apache.commons.logging.*;
 import org.apache.commons.logging.impl.*;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.util.ServletUtil;
 
 /**
  * Change log level in runtime.
  */
+@InterfaceStability.Evolving
 public class LogLevel {
   public static final String USAGES = "\nUSAGES:\n"
     + "java " + LogLevel.class.getName()
@@ -82,6 +85,8 @@ public class LogLevel {
   /**
    * A servlet implementation
    */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+  @InterfaceStability.Unstable
   public static class Servlet extends HttpServlet {
     private static final long serialVersionUID = 1L;
 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/ContextFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/ContextFactory.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/ContextFactory.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/ContextFactory.java Fri Jun 11 21:34:56 2010
@@ -28,13 +28,17 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Properties;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.spi.NullContext;
 
 /**
  * Factory class for creating MetricsContext objects.  To obtain an instance
  * of this class, use the static <code>getFactory()</code> method.
  */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public class ContextFactory {
     
   private static final String PROPERTIES_FILE = 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsContext.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsContext.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsContext.java Fri Jun 11 21:34:56 2010
@@ -24,11 +24,15 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.spi.OutputRecord;
 
 /**
  * The main interface to the metrics package. 
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public interface MetricsContext {
     
   /**

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsException.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsException.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsException.java Fri Jun 11 21:34:56 2010
@@ -20,9 +20,14 @@
 
 package org.apache.hadoop.metrics;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * General-purpose, unchecked metrics exception.
  */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public class MetricsException extends RuntimeException {
     
   private static final long serialVersionUID = -1643257498540498497L;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsRecord.java Fri Jun 11 21:34:56 2010
@@ -20,6 +20,9 @@
 
 package org.apache.hadoop.metrics;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * A named and optionally tagged set of records to be sent to the metrics
  * system. <p/>
@@ -65,6 +68,8 @@ package org.apache.hadoop.metrics;
  * with the same set of tag names and tag values.  Different threads should 
  * <b>not</b> use the same MetricsRecord instance at the same time.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public interface MetricsRecord {
     
   /**

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsServlet.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsServlet.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsServlet.java Fri Jun 11 21:34:56 2010
@@ -30,6 +30,8 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.metrics.spi.OutputRecord;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
@@ -42,6 +44,8 @@ import org.mortbay.util.ajax.JSON.Output
  * textual representation (no promises are made for parseability), and
  * users can use "?format=json" for parseable output.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class MetricsServlet extends HttpServlet {
   
   /**

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsUtil.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsUtil.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/MetricsUtil.java Fri Jun 11 21:34:56 2010
@@ -22,6 +22,8 @@ import java.net.UnknownHostException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 
 /**
  * Utility class to simplify creation and reporting of hadoop metrics.
@@ -31,6 +33,8 @@ import org.apache.commons.logging.LogFac
  * @see org.apache.hadoop.metrics.MetricsContext
  * @see org.apache.hadoop.metrics.ContextFactory
  */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public class MetricsUtil {
     
   public static final Log LOG =

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/Updater.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/Updater.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/Updater.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/Updater.java Fri Jun 11 21:34:56 2010
@@ -20,9 +20,14 @@
 
 package org.apache.hadoop.metrics;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Call-back interface.  See <code>MetricsContext.registerUpdater()</code>.
  */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
 public interface Updater {
     
   /**

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java Fri Jun 11 21:34:56 2010
@@ -26,6 +26,8 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.PrintWriter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
 import org.apache.hadoop.metrics.spi.OutputRecord;
@@ -41,18 +43,24 @@ import org.apache.hadoop.metrics.spi.Out
  * myContextName.period=5
  * </pre>
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class FileContext extends AbstractMetricsContext {
     
   /* Configuration attribute names */
+  @InterfaceAudience.Private
   protected static final String FILE_NAME_PROPERTY = "fileName";
+  @InterfaceAudience.Private
   protected static final String PERIOD_PROPERTY = "period";
     
   private File file = null;              // file for metrics to be written to
   private PrintWriter writer = null;
     
   /** Creates a new instance of FileContext */
+  @InterfaceAudience.Private
   public FileContext() {}
     
+  @InterfaceAudience.Private
   public void init(String contextName, ContextFactory factory) {
     super.init(contextName, factory);
         
@@ -67,6 +75,7 @@ public class FileContext extends Abstrac
   /**
    * Returns the configured file name, or null.
    */
+  @InterfaceAudience.Private
   public String getFileName() {
     if (file == null) {
       return null;
@@ -81,6 +90,7 @@ public class FileContext extends Abstrac
    * if specified. Otherwise the data will be written to standard
    * output.
    */
+  @InterfaceAudience.Private
   public void startMonitoring()
     throws IOException 
   {
@@ -96,6 +106,7 @@ public class FileContext extends Abstrac
    * Stops monitoring, closing the file.
    * @see #close()
    */
+  @InterfaceAudience.Private
   public void stopMonitoring() {
     super.stopMonitoring();
         
@@ -108,6 +119,7 @@ public class FileContext extends Abstrac
   /**
    * Emits a metrics record to a file.
    */
+  @InterfaceAudience.Private
   public void emitRecord(String contextName, String recordName, OutputRecord outRec) {
     writer.print(contextName);
     writer.print(".");
@@ -133,6 +145,7 @@ public class FileContext extends Abstrac
   /**
    * Flushes the output writer, forcing updates to disk.
    */
+  @InterfaceAudience.Private
   public void flush() {
     writer.flush();
   }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java Fri Jun 11 21:34:56 2010
@@ -32,6 +32,8 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.MetricsException;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
@@ -42,6 +44,8 @@ import org.apache.hadoop.metrics.spi.Uti
  * Context for sending metrics to Ganglia.
  * 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class GangliaContext extends AbstractMetricsContext {
     
   private static final String PERIOD_PROPERTY = "period";
@@ -83,9 +87,11 @@ public class GangliaContext extends Abst
   private DatagramSocket datagramSocket;
     
   /** Creates a new instance of GangliaContext */
+  @InterfaceAudience.Private
   public GangliaContext() {
   }
     
+  @InterfaceAudience.Private
   public void init(String contextName, ContextFactory factory) {
     super.init(contextName, factory);
     parseAndSetPeriod(PERIOD_PROPERTY);
@@ -106,6 +112,7 @@ public class GangliaContext extends Abst
     }
   }
 
+  @InterfaceAudience.Private
   public void emitRecord(String contextName, String recordName,
     OutputRecord outRec) 
   throws IOException {

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/EventCounter.java Fri Jun 11 21:34:56 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.metrics.jvm;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.log4j.AppenderSkeleton;
 import org.apache.log4j.Level;
 import org.apache.log4j.spi.LoggingEvent;
@@ -27,6 +29,8 @@ import org.apache.log4j.spi.LoggingEvent
  * A log4J Appender that simply counts logging events in three levels:
  * fatal, error and warn.
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class EventCounter extends AppenderSkeleton {
         
     private static final int FATAL = 0;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java Fri Jun 11 21:34:56 2010
@@ -22,6 +22,9 @@ import java.lang.management.MemoryMXBean
 import java.lang.management.MemoryUsage;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
@@ -38,6 +41,8 @@ import org.apache.commons.logging.LogFac
  * Any application can create an instance of this class in order to emit
  * Java VM metrics.  
  */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
 public class JvmMetrics implements Updater {
     
     private static final float M = 1024*1024;

Added: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/package-info.java?rev=953881&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/package-info.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/package-info.java Fri Jun 11 21:34:56 2010
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+package org.apache.hadoop.metrics.jvm;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

Propchange: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/jvm/package-info.java
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java Fri Jun 11 21:34:56 2010
@@ -34,6 +34,8 @@ import java.util.TimerTask;
 import java.util.TreeMap;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsException;
@@ -50,6 +52,8 @@ import org.apache.hadoop.metrics.Updater
  * override the abstract <code>emitRecord</code> method in order to transmit
  * the data. <p/>
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class AbstractMetricsContext implements MetricsContext {
     
   private int period = MetricsContext.DEFAULT_PERIOD;
@@ -61,6 +65,7 @@ public abstract class AbstractMetricsCon
   private ContextFactory factory = null;
   private String contextName = null;
     
+  @InterfaceAudience.Private
   public static class TagMap extends TreeMap<String,Object> {
     private static final long serialVersionUID = 3546309335061952993L;
     TagMap() {
@@ -84,6 +89,7 @@ public abstract class AbstractMetricsCon
     }
   }
   
+  @InterfaceAudience.Private
   public static class MetricMap extends TreeMap<String,Number> {
     private static final long serialVersionUID = -7495051861141631609L;
     MetricMap() {

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/CompositeContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/CompositeContext.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/CompositeContext.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/CompositeContext.java Fri Jun 11 21:34:56 2010
@@ -26,6 +26,8 @@ import java.util.ArrayList;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsException;
@@ -33,6 +35,8 @@ import org.apache.hadoop.metrics.Metrics
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class CompositeContext extends AbstractMetricsContext {
 
   private static final Log LOG = LogFactory.getLog(CompositeContext.class);
@@ -41,9 +45,11 @@ public class CompositeContext extends Ab
   private final ArrayList<MetricsContext> subctxt =
     new ArrayList<MetricsContext>();
 
+  @InterfaceAudience.Private
   public CompositeContext() {
   }
 
+  @InterfaceAudience.Private
   public void init(String contextName, ContextFactory factory) {
     super.init(contextName, factory);
     int nKids;
@@ -64,6 +70,7 @@ public class CompositeContext extends Ab
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   public MetricsRecord newRecord(String recordName) {
     return (MetricsRecord) Proxy.newProxyInstance(
@@ -72,6 +79,7 @@ public class CompositeContext extends Ab
         new MetricsRecordDelegator(recordName, subctxt));
   }
 
+  @InterfaceAudience.Private
   @Override
   protected void emitRecord(String contextName, String recordName,
       OutputRecord outRec) throws IOException {
@@ -88,6 +96,7 @@ public class CompositeContext extends Ab
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   protected void flush() throws IOException {
     for (MetricsContext ctxt : subctxt) {
@@ -99,6 +108,7 @@ public class CompositeContext extends Ab
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   public void startMonitoring() throws IOException {
     for (MetricsContext ctxt : subctxt) {
@@ -110,6 +120,7 @@ public class CompositeContext extends Ab
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   public void stopMonitoring() {
     for (MetricsContext ctxt : subctxt) {
@@ -120,6 +131,7 @@ public class CompositeContext extends Ab
   /**
    * Return true if all subcontexts are monitoring.
    */
+  @InterfaceAudience.Private
   @Override
   public boolean isMonitoring() {
     boolean ret = true;
@@ -129,6 +141,7 @@ public class CompositeContext extends Ab
     return ret;
   }
 
+  @InterfaceAudience.Private
   @Override
   public void close() {
     for (MetricsContext ctxt : subctxt) {
@@ -136,6 +149,7 @@ public class CompositeContext extends Ab
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   public void registerUpdater(Updater updater) {
     for (MetricsContext ctxt : subctxt) {
@@ -143,6 +157,7 @@ public class CompositeContext extends Ab
     }
   }
 
+  @InterfaceAudience.Private
   @Override
   public void unregisterUpdater(Updater updater) {
     for (MetricsContext ctxt : subctxt) {

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/MetricValue.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/MetricValue.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/MetricValue.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/MetricValue.java Fri Jun 11 21:34:56 2010
@@ -20,9 +20,14 @@
 
 package org.apache.hadoop.metrics.spi;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * A Number that is either an absolute or an incremental amount.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class MetricValue {
     
   public static final boolean ABSOLUTE = false;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java Fri Jun 11 21:34:56 2010
@@ -22,6 +22,10 @@ package org.apache.hadoop.metrics.spi;
 
 import java.util.LinkedHashMap;
 import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.MetricsException;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
 
@@ -30,6 +34,8 @@ import org.apache.hadoop.metrics.spi.Abs
  * from which it was created, and delegates back to it on <code>update</code>
  * and <code>remove()</code>.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class MetricsRecordImpl implements MetricsRecord {
     
   private TagMap tagTable = new TagMap();

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/NoEmitMetricsContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/NoEmitMetricsContext.java?rev=953881&r1=953880&r2=953881&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/NoEmitMetricsContext.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/metrics/spi/NoEmitMetricsContext.java Fri Jun 11 21:34:56 2010
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.metrics.spi;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.MetricsServlet;
 
@@ -27,14 +29,18 @@ import org.apache.hadoop.metrics.Metrics
  * This is useful if you want to support {@link MetricsServlet}, but
  * not emit metrics in any other way.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class NoEmitMetricsContext extends AbstractMetricsContext {
     
     private static final String PERIOD_PROPERTY = "period";
       
     /** Creates a new instance of NullContextWithUpdateThread */
+    @InterfaceAudience.Private
     public NoEmitMetricsContext() {
     }
     
+    @InterfaceAudience.Private
     public void init(String contextName, ContextFactory factory) {
       super.init(contextName, factory);
       parseAndSetPeriod(PERIOD_PROPERTY);
@@ -43,6 +49,7 @@ public class NoEmitMetricsContext extend
     /**
      * Do-nothing version of emitRecord
      */
+    @InterfaceAudience.Private
     protected void emitRecord(String contextName, String recordName,
                               OutputRecord outRec) {
     }