Posted to common-commits@hadoop.apache.org by om...@apache.org on 2010/12/04 08:13:12 UTC

svn commit: r1042107 [5/6] - in /hadoop/common/branches/HADOOP-6685: ./ ivy/ src/java/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/io/ src/java/org/apache/hadoop/io/file/tfile/ src/java/org/apache/hadoop/io/serial/ src/java/org/apache/had...

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java Sat Dec  4 07:13:10 2010
@@ -33,9 +33,13 @@ import org.apache.hadoop.io.RawComparato
  * </p>
  * @param <T>
  * @see JavaSerialization
+ * @deprecated Use 
+ *    {@link org.apache.hadoop.io.serial.lib.DeserializationRawComparator}
+ *    instead.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
+@Deprecated
 public class JavaSerializationComparator<T extends Serializable&Comparable<T>>
   extends DeserializerComparator<T> {
 

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Serialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Serialization.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Serialization.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Serialization.java Sat Dec  4 07:13:10 2010
@@ -26,9 +26,13 @@ import org.apache.hadoop.classification.
  * Encapsulates a {@link Serializer}/{@link Deserializer} pair.
  * </p>
  * @param <T>
+ * @deprecated Use 
+ *    {@link org.apache.hadoop.io.serial.Serialization}
+ *    instead.
  */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
+@Deprecated
 public interface Serialization<T> {
   
   /**

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java Sat Dec  4 07:13:10 2010
@@ -36,9 +36,13 @@ import org.apache.hadoop.util.StringUtil
  * <p>
  * A factory for {@link Serialization}s.
  * </p>
+ * @deprecated Use 
+ *    {@link org.apache.hadoop.io.serial.SerializationFactory}
+ *    instead.
  */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
+@Deprecated
 public class SerializationFactory extends Configured {
   
   private static final Log LOG =

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Serializer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Serializer.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Serializer.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/Serializer.java Sat Dec  4 07:13:10 2010
@@ -36,9 +36,13 @@ import org.apache.hadoop.classification.
  * {@link #serialize(Object)}.
  * </p>
  * @param <T>
+ * @deprecated Use 
+ *    {@link org.apache.hadoop.io.serial.Serialization}
+ *    instead.
  */
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
+@Deprecated
 public interface Serializer<T> {
   /**
    * <p>Prepare the serializer for writing.</p>

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java Sat Dec  4 07:13:10 2010
@@ -23,7 +23,6 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.util.Map;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -36,9 +35,13 @@ import org.apache.hadoop.util.Reflection
  * A {@link Serialization} for {@link Writable}s that delegates to
  * {@link Writable#write(java.io.DataOutput)} and
  * {@link Writable#readFields(java.io.DataInput)}.
+ * @deprecated Use 
+ *    {@link org.apache.hadoop.io.serial.lib.WritableSerialization}
+ *    instead.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
+@Deprecated
 public class WritableSerialization extends Configured
 	implements Serialization<Writable> {
   static class WritableDeserializer extends Configured

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerializable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerializable.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerializable.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerializable.java Sat Dec  4 07:13:10 2010
@@ -25,9 +25,13 @@ import org.apache.hadoop.classification.
  * Tag interface for Avro 'reflect' serializable classes. Classes implementing 
  * this interface can be serialized/deserialized using 
  * {@link AvroReflectSerialization}.
+ * @deprecated Use {@link org.apache.hadoop.io.serial.lib.avro.AvroReflectSerializable}
+ *    instead.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public interface AvroReflectSerializable {
+@Deprecated
+public interface AvroReflectSerializable 
+  extends org.apache.hadoop.io.serial.lib.avro.AvroReflectSerializable {
 
 }

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java Sat Dec  4 07:13:10 2010
@@ -35,11 +35,13 @@ import org.apache.hadoop.classification.
  * serialization, it must either be in the package list configured via 
  * <code>avro.reflect.pkgs</code> or implement 
  * {@link AvroReflectSerializable} interface.
- *
+ * @deprecated Use {@link org.apache.hadoop.io.serial.lib.avro.AvroSerialization}
+ *    instead.
  */
 @SuppressWarnings("unchecked")
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
+@Deprecated
 public class AvroReflectSerialization extends AvroSerialization<Object>{
 
   /**

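The javadoc above names the two ways a class becomes eligible for Avro 'reflect' serialization: list its package in the avro.reflect.pkgs property, or implement the AvroReflectSerializable tag interface (which this commit deprecates in favor of the one under org.apache.hadoop.io.serial.lib.avro). A minimal sketch of the configuration route, assuming a hypothetical application package com.example.records; only the avro.reflect.pkgs key itself comes from the javadoc in this diff:

    import org.apache.hadoop.conf.Configuration;

    public class AvroReflectSetup {
      // Registers a hypothetical package so its classes are accepted by
      // AvroReflectSerialization without implementing the tag interface.
      public static Configuration configure() {
        Configuration conf = new Configuration();
        conf.set("avro.reflect.pkgs", "com.example.records");
        return conf;
      }
    }

The alternative is to have the class implement the AvroReflectSerializable tag interface itself, which needs no configuration.
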
Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java Sat Dec  4 07:13:10 2010
@@ -37,9 +37,12 @@ import org.apache.hadoop.io.serializer.S
 
 /**
  * Base class for providing serialization to Avro types.
+ * @deprecated Use {@link org.apache.hadoop.io.serial.lib.avro.AvroSerialization}
+ *    instead.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
+@Deprecated
 public abstract class AvroSerialization<T> extends Configured 
 	implements Serialization<T>{
   

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java Sat Dec  4 07:13:10 2010
@@ -30,10 +30,13 @@ import org.apache.hadoop.classification.
 /**
  * Serialization for Avro Specific classes. This serialization is to be used 
  * for classes generated by Avro's 'specific' compiler.
+ * @deprecated Use {@link org.apache.hadoop.io.serial.lib.avro.AvroSerialization}
+ *    instead.
  */
 @SuppressWarnings("unchecked")
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
+@Deprecated
 public class AvroSpecificSerialization 
                           extends AvroSerialization<SpecificRecord>{
 

Added: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/package-info.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/package-info.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/io/serializer/package-info.java Sat Dec  4 07:13:10 2010
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This deprecated package provides a mechanism for using different 
+ * serialization frameworks in Hadoop. The property "io.serializations" 
+ * defines a list of
+ * {@link org.apache.hadoop.io.serializer.Serialization}s that know how to 
+ * create {@link org.apache.hadoop.io.serializer.Serializer}s and
+ * {@link org.apache.hadoop.io.serializer.Deserializer}s.
+ * <p>
+ *
+ * To add a new serialization framework write an implementation of
+ * {@link org.apache.hadoop.io.serializer.Serialization} and add its name to 
+ * the "io.serializations" property.
+ * <p>
+ * 
+ * This package has been replaced by the {@link org.apache.hadoop.io.serial} 
+ * package.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+@Deprecated
+package org.apache.hadoop.io.serializer;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

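The package javadoc added above summarizes the old contract: "io.serializations" lists Serialization implementations, and each one hands out a Serializer/Deserializer pair. A sketch of that older round trip, reconstructed from the open/serialize/deserialize calls this commit removes from ReflectionUtils.copy; the helper class and method names here are illustrative only:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.serializer.Deserializer;
    import org.apache.hadoop.io.serializer.SerializationFactory;
    import org.apache.hadoop.io.serializer.Serializer;

    public class OldSerializerRoundTrip {
      // Copies src through a serialize/deserialize cycle, mirroring the code
      // this commit deletes from ReflectionUtils.copy().
      @SuppressWarnings("unchecked")
      public static <T> T roundTrip(Configuration conf, T src) throws IOException {
        SerializationFactory factory = new SerializationFactory(conf);
        Class<T> cls = (Class<T>) src.getClass();

        DataOutputBuffer out = new DataOutputBuffer();
        Serializer<T> serializer = factory.getSerializer(cls);
        serializer.open(out);
        serializer.serialize(src);
        serializer.close();

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        Deserializer<T> deserializer = factory.getDeserializer(cls);
        deserializer.open(in);
        T copy = deserializer.deserialize(null);
        deserializer.close();
        return copy;
      }
    }
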
Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/security/SaslRpcServer.java Sat Dec  4 07:13:10 2010
@@ -239,7 +239,7 @@ public class SaslRpcServer {
         if (ac.isAuthorized()) {
           if (LOG.isDebugEnabled()) {
             String username = getIdentifier(authzid, secretManager).getUser()
-            .getUserName().toString();
+            .getUserName();
             LOG.debug("SASL server DIGEST-MD5 callback: setting "
                 + "canonicalized client ID: " + username);
           }

Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/util/Options.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/util/Options.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/util/Options.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/util/Options.java Sat Dec  4 07:13:10 2010
@@ -22,6 +22,8 @@ import java.util.Arrays;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.serial.RawComparator;
+import org.apache.hadoop.io.serial.Serialization;
 
 /**
  * This class allows generic access to variable length type-safe parameter
@@ -79,6 +81,16 @@ public class Options {
     }
   }
 
+  public static abstract class SerializationOption {
+    private final Serialization<?> value;
+    protected SerializationOption(Serialization<?> value) {
+      this.value = value;
+    }
+    public Serialization<?> getValue() {
+      return value;
+    }
+  }
+  
   public static abstract class PathOption {
     private final Path value;
     protected PathOption(Path value) {
@@ -119,6 +131,16 @@ public class Options {
     }
   }
 
+  public static abstract class ComparatorOption {
+    private final RawComparator value;
+    protected ComparatorOption(RawComparator value) {
+      this.value = value;
+    }
+    public RawComparator getValue() {
+      return value;
+    }
+  }
+
   /**
    * Find the first option of the required class.
    * @param <T> the static class to find
@@ -129,8 +151,7 @@ public class Options {
    * @throws IOException
    */
   @SuppressWarnings("unchecked")
-  public static <base, T extends base> T getOption(Class<T> cls, base [] opts
-                                                   ) throws IOException {
+  public static <base, T extends base> T getOption(Class<T> cls, base [] opts) {
     for(base o: opts) {
       if (o.getClass() == cls) {
         return (T) o;

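The new SerializationOption and ComparatorOption holders follow the same shape as the existing PathOption: one small abstract wrapper per value type, with Options.getOption pulling the first matching option out of a varargs array (and, after this change, no longer declaring IOException). A hedged illustration of the pattern; BufferSizeOption and bufferSize are invented names, not part of this commit:

    import org.apache.hadoop.util.Options;

    public class OptionPatternDemo {
      // Hypothetical option holder, modeled on Options.PathOption and the new
      // SerializationOption/ComparatorOption wrappers.
      public static class BufferSizeOption {
        private final int value;
        public BufferSizeOption(int value) { this.value = value; }
        public int getValue() { return value; }
      }

      // Hypothetical caller: takes a varargs option list and falls back to a
      // default when no BufferSizeOption was passed (getOption is assumed to
      // return null in that case, which is not shown in this hunk).
      public static int bufferSize(Object... opts) {
        BufferSizeOption opt = Options.getOption(BufferSizeOption.class, opts);
        return opt == null ? 4096 : opt.getValue();
      }
    }
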
Modified: hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/util/ReflectionUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/util/ReflectionUtils.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/util/ReflectionUtils.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/java/org/apache/hadoop/util/ReflectionUtils.java Sat Dec  4 07:13:10 2010
@@ -37,9 +37,9 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.serializer.Deserializer;
-import org.apache.hadoop.io.serializer.SerializationFactory;
-import org.apache.hadoop.io.serializer.Serializer;
+import org.apache.hadoop.io.serial.SerializationFactory;
+import org.apache.hadoop.io.serial.Serialization;
+import org.apache.hadoop.io.serial.lib.WritableSerialization;
 
 /**
  * General reflection utils
@@ -49,7 +49,6 @@ import org.apache.hadoop.io.serializer.S
 public class ReflectionUtils {
     
   private static final Class<?>[] EMPTY_ARRAY = new Class[]{};
-  volatile private static SerializationFactory serialFactory = null;
 
   /** 
    * Cache of constructors for each class. Pins the classes so they
@@ -257,43 +256,59 @@ public class ReflectionUtils {
       }
     };
 
-  private static SerializationFactory getFactory(Configuration conf) {
-    if (serialFactory == null) {
-      serialFactory = new SerializationFactory(conf);
-    }
-    return serialFactory;
-  }
-  
   /**
-   * Make a copy of the writable object using serialization to a buffer
+   * Make a copy of the object using serialization to a buffer
    * @param dst the object to copy from
    * @param src the object to copy into, which is destroyed
    * @throws IOException
    */
   @SuppressWarnings("unchecked")
   public static <T> T copy(Configuration conf, 
-                                T src, T dst) throws IOException {
+                           T src, T dst) throws IOException {
+    SerializationFactory factory = SerializationFactory.getInstance(conf);
+    Class<T> cls = (Class<T>) src.getClass();
+    Serialization<T> serializer = 
+      (Serialization<T>) factory.getSerializationByType(cls);
+    return copy(conf, src, dst, serializer);
+  }
+  
+  /**
+   * Make a copy of the object with the given serialization.
+   * @param <T> the type to copy
+   * @param conf the configuration to initialize the new object with
+   * @param src the object to copy
+   * @param dst the object to copy into, which can be null
+   * @param serial the serialization to use
+   * @return the new object that was copied into
+   * @throws IOException
+   */
+  @SuppressWarnings("unchecked")
+  public static <T> T copy(Configuration conf, T src, T dst, 
+                           Serialization<T> serial) throws IOException {
     CopyInCopyOutBuffer buffer = cloneBuffers.get();
     buffer.outBuffer.reset();
-    SerializationFactory factory = getFactory(conf);
+    SerializationFactory factory = SerializationFactory.getInstance(conf);
     Class<T> cls = (Class<T>) src.getClass();
-    Serializer<T> serializer = factory.getSerializer(cls);
-    serializer.open(buffer.outBuffer);
-    serializer.serialize(src);
+    Serialization<T> serializer = 
+      (Serialization<T>) factory.getSerializationByType(cls);
+    serializer.serialize(buffer.outBuffer, src);
     buffer.moveData();
-    Deserializer<T> deserializer = factory.getDeserializer(cls);
-    deserializer.open(buffer.inBuffer);
-    dst = deserializer.deserialize(dst);
-    return dst;
+    return serializer.deserialize(buffer.inBuffer, dst, conf);
   }
 
+  private static Configuration defaultConfiguration = null;
+  private static synchronized Configuration getDefaultConfiguration() {
+    if (defaultConfiguration == null) {
+      defaultConfiguration = new Configuration();
+    }
+    return defaultConfiguration;
+  }
+  
   @Deprecated
   public static void cloneWritableInto(Writable dst, 
                                        Writable src) throws IOException {
-    CopyInCopyOutBuffer buffer = cloneBuffers.get();
-    buffer.outBuffer.reset();
-    src.write(buffer.outBuffer);
-    buffer.moveData();
-    dst.readFields(buffer.inBuffer);
+    WritableSerialization serial = new WritableSerialization();
+    serial.setSpecificType(src.getClass());
+    copy(getDefaultConfiguration(), src, dst, serial);
   }
 }

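The rewritten ReflectionUtils.copy shows the shape of the replacement API on this branch: a shared factory from SerializationFactory.getInstance(conf), a Serialization looked up with getSerializationByType, and one-shot serialize(stream, obj) / deserialize(stream, reuse, conf) calls in place of the old open/serialize/close sequence. A sketch of a caller built only from the calls visible in this hunk; the class and method names are invented, and the buffer types are an assumption about what CopyInCopyOutBuffer wraps:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.serial.Serialization;
    import org.apache.hadoop.io.serial.SerializationFactory;

    public class NewSerialRoundTrip {
      // Copies src with the org.apache.hadoop.io.serial API, following the
      // calls made by the new ReflectionUtils.copy() above.
      @SuppressWarnings("unchecked")
      public static <T> T cloneViaSerial(Configuration conf, T src,
                                         T reuse) throws IOException {
        SerializationFactory factory = SerializationFactory.getInstance(conf);
        Class<T> cls = (Class<T>) src.getClass();
        Serialization<T> serial =
            (Serialization<T>) factory.getSerializationByType(cls);

        DataOutputBuffer out = new DataOutputBuffer();
        serial.serialize(out, src);

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        return serial.deserialize(in, reuse, conf);
      }
    }
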
Added: hadoop/common/branches/HADOOP-6685/src/protobuf/SerializationMetadata.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/protobuf/SerializationMetadata.proto?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/protobuf/SerializationMetadata.proto (added)
+++ hadoop/common/branches/HADOOP-6685/src/protobuf/SerializationMetadata.proto Sat Dec  4 07:13:10 2010
@@ -0,0 +1,15 @@
+package org.apache.hadoop.io.serial.lib;
+
+message TypedSerializationMetadata {
+  optional string typename = 1;
+}
+
+message AvroMetadata {
+  optional string schema = 1;
+  optional Kind kind = 2;
+  enum Kind {
+    SPECIFIC = 1;
+    GENERIC = 2;
+    REFLECTION = 3;
+  }
+}
\ No newline at end of file

Added: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/AvroKey.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/AvroKey.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/AvroKey.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/AvroKey.java Sat Dec  4 07:13:10 2010
@@ -0,0 +1,21 @@
+package org.apache.hadoop.io;
+
+@SuppressWarnings("all")
+public class AvroKey extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AvroKey\",\"namespace\":\"org.apache.hadoop.io\",\"fields\":[{\"name\":\"value\",\"type\":\"int\"}]}");
+  public int value;
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  public java.lang.Object get(int field$) {
+    switch (field$) {
+    case 0: return value;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+  @SuppressWarnings(value="unchecked")
+  public void put(int field$, java.lang.Object value$) {
+    switch (field$) {
+    case 0: value = (java.lang.Integer)value$; break;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+}

Added: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/AvroValue.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/AvroValue.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/AvroValue.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/AvroValue.java Sat Dec  4 07:13:10 2010
@@ -0,0 +1,21 @@
+package org.apache.hadoop.io;
+
+@SuppressWarnings("all")
+public class AvroValue extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"AvroValue\",\"namespace\":\"org.apache.hadoop.io\",\"fields\":[{\"name\":\"value\",\"type\":\"string\"}]}");
+  public org.apache.avro.util.Utf8 value;
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  public java.lang.Object get(int field$) {
+    switch (field$) {
+    case 0: return value;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+  @SuppressWarnings(value="unchecked")
+  public void put(int field$, java.lang.Object value$) {
+    switch (field$) {
+    case 0: value = (org.apache.avro.util.Utf8)value$; break;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+}

Added: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ProtoTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ProtoTest.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ProtoTest.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ProtoTest.java Sat Dec  4 07:13:10 2010
@@ -0,0 +1,641 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: src/test/core/org/apache/hadoop/io/ProtoTest.proto
+
+package org.apache.hadoop.io;
+
+public final class ProtoTest {
+  private ProtoTest() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public static final class ProtoKey extends
+      com.google.protobuf.GeneratedMessage {
+    // Use ProtoKey.newBuilder() to construct.
+    private ProtoKey() {
+      initFields();
+    }
+    private ProtoKey(boolean noInit) {}
+    
+    private static final ProtoKey defaultInstance;
+    public static ProtoKey getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public ProtoKey getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.io.ProtoTest.internal_static_org_apache_hadoop_io_ProtoKey_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.io.ProtoTest.internal_static_org_apache_hadoop_io_ProtoKey_fieldAccessorTable;
+    }
+    
+    // required int32 value = 1;
+    public static final int VALUE_FIELD_NUMBER = 1;
+    private boolean hasValue;
+    private int value_ = 0;
+    public boolean hasValue() { return hasValue; }
+    public int getValue() { return value_; }
+    
+    private void initFields() {
+    }
+    public final boolean isInitialized() {
+      if (!hasValue) return false;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (hasValue()) {
+        output.writeInt32(1, getValue());
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (hasValue()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, getValue());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoKey parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.io.ProtoTest.ProtoKey prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder> {
+      private org.apache.hadoop.io.ProtoTest.ProtoKey result;
+      
+      // Construct using org.apache.hadoop.io.ProtoTest.ProtoKey.newBuilder()
+      private Builder() {}
+      
+      private static Builder create() {
+        Builder builder = new Builder();
+        builder.result = new org.apache.hadoop.io.ProtoTest.ProtoKey();
+        return builder;
+      }
+      
+      protected org.apache.hadoop.io.ProtoTest.ProtoKey internalGetResult() {
+        return result;
+      }
+      
+      public Builder clear() {
+        if (result == null) {
+          throw new IllegalStateException(
+            "Cannot call clear() after build().");
+        }
+        result = new org.apache.hadoop.io.ProtoTest.ProtoKey();
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(result);
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.io.ProtoTest.ProtoKey.getDescriptor();
+      }
+      
+      public org.apache.hadoop.io.ProtoTest.ProtoKey getDefaultInstanceForType() {
+        return org.apache.hadoop.io.ProtoTest.ProtoKey.getDefaultInstance();
+      }
+      
+      public boolean isInitialized() {
+        return result.isInitialized();
+      }
+      public org.apache.hadoop.io.ProtoTest.ProtoKey build() {
+        if (result != null && !isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return buildPartial();
+      }
+      
+      private org.apache.hadoop.io.ProtoTest.ProtoKey buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        if (!isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return buildPartial();
+      }
+      
+      public org.apache.hadoop.io.ProtoTest.ProtoKey buildPartial() {
+        if (result == null) {
+          throw new IllegalStateException(
+            "build() has already been called on this Builder.");
+        }
+        org.apache.hadoop.io.ProtoTest.ProtoKey returnMe = result;
+        result = null;
+        return returnMe;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.io.ProtoTest.ProtoKey) {
+          return mergeFrom((org.apache.hadoop.io.ProtoTest.ProtoKey)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.io.ProtoTest.ProtoKey other) {
+        if (other == org.apache.hadoop.io.ProtoTest.ProtoKey.getDefaultInstance()) return this;
+        if (other.hasValue()) {
+          setValue(other.getValue());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                return this;
+              }
+              break;
+            }
+            case 8: {
+              setValue(input.readInt32());
+              break;
+            }
+          }
+        }
+      }
+      
+      
+      // required int32 value = 1;
+      public boolean hasValue() {
+        return result.hasValue();
+      }
+      public int getValue() {
+        return result.getValue();
+      }
+      public Builder setValue(int value) {
+        result.hasValue = true;
+        result.value_ = value;
+        return this;
+      }
+      public Builder clearValue() {
+        result.hasValue = false;
+        result.value_ = 0;
+        return this;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.io.ProtoKey)
+    }
+    
+    static {
+      defaultInstance = new ProtoKey(true);
+      org.apache.hadoop.io.ProtoTest.internalForceInit();
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:org.apache.hadoop.io.ProtoKey)
+  }
+  
+  public static final class ProtoValue extends
+      com.google.protobuf.GeneratedMessage {
+    // Use ProtoValue.newBuilder() to construct.
+    private ProtoValue() {
+      initFields();
+    }
+    private ProtoValue(boolean noInit) {}
+    
+    private static final ProtoValue defaultInstance;
+    public static ProtoValue getDefaultInstance() {
+      return defaultInstance;
+    }
+    
+    public ProtoValue getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+    
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.io.ProtoTest.internal_static_org_apache_hadoop_io_ProtoValue_descriptor;
+    }
+    
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.io.ProtoTest.internal_static_org_apache_hadoop_io_ProtoValue_fieldAccessorTable;
+    }
+    
+    // required string value = 2;
+    public static final int VALUE_FIELD_NUMBER = 2;
+    private boolean hasValue;
+    private java.lang.String value_ = "";
+    public boolean hasValue() { return hasValue; }
+    public java.lang.String getValue() { return value_; }
+    
+    private void initFields() {
+    }
+    public final boolean isInitialized() {
+      if (!hasValue) return false;
+      return true;
+    }
+    
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (hasValue()) {
+        output.writeString(2, getValue());
+      }
+      getUnknownFields().writeTo(output);
+    }
+    
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+    
+      size = 0;
+      if (hasValue()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeStringSize(2, getValue());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+    
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.io.ProtoTest.ProtoValue parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.io.ProtoTest.ProtoValue prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+    
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder> {
+      private org.apache.hadoop.io.ProtoTest.ProtoValue result;
+      
+      // Construct using org.apache.hadoop.io.ProtoTest.ProtoValue.newBuilder()
+      private Builder() {}
+      
+      private static Builder create() {
+        Builder builder = new Builder();
+        builder.result = new org.apache.hadoop.io.ProtoTest.ProtoValue();
+        return builder;
+      }
+      
+      protected org.apache.hadoop.io.ProtoTest.ProtoValue internalGetResult() {
+        return result;
+      }
+      
+      public Builder clear() {
+        if (result == null) {
+          throw new IllegalStateException(
+            "Cannot call clear() after build().");
+        }
+        result = new org.apache.hadoop.io.ProtoTest.ProtoValue();
+        return this;
+      }
+      
+      public Builder clone() {
+        return create().mergeFrom(result);
+      }
+      
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.io.ProtoTest.ProtoValue.getDescriptor();
+      }
+      
+      public org.apache.hadoop.io.ProtoTest.ProtoValue getDefaultInstanceForType() {
+        return org.apache.hadoop.io.ProtoTest.ProtoValue.getDefaultInstance();
+      }
+      
+      public boolean isInitialized() {
+        return result.isInitialized();
+      }
+      public org.apache.hadoop.io.ProtoTest.ProtoValue build() {
+        if (result != null && !isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return buildPartial();
+      }
+      
+      private org.apache.hadoop.io.ProtoTest.ProtoValue buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        if (!isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return buildPartial();
+      }
+      
+      public org.apache.hadoop.io.ProtoTest.ProtoValue buildPartial() {
+        if (result == null) {
+          throw new IllegalStateException(
+            "build() has already been called on this Builder.");
+        }
+        org.apache.hadoop.io.ProtoTest.ProtoValue returnMe = result;
+        result = null;
+        return returnMe;
+      }
+      
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.io.ProtoTest.ProtoValue) {
+          return mergeFrom((org.apache.hadoop.io.ProtoTest.ProtoValue)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+      
+      public Builder mergeFrom(org.apache.hadoop.io.ProtoTest.ProtoValue other) {
+        if (other == org.apache.hadoop.io.ProtoTest.ProtoValue.getDefaultInstance()) return this;
+        if (other.hasValue()) {
+          setValue(other.getValue());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+      
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                return this;
+              }
+              break;
+            }
+            case 18: {
+              setValue(input.readString());
+              break;
+            }
+          }
+        }
+      }
+      
+      
+      // required string value = 2;
+      public boolean hasValue() {
+        return result.hasValue();
+      }
+      public java.lang.String getValue() {
+        return result.getValue();
+      }
+      public Builder setValue(java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  result.hasValue = true;
+        result.value_ = value;
+        return this;
+      }
+      public Builder clearValue() {
+        result.hasValue = false;
+        result.value_ = getDefaultInstance().getValue();
+        return this;
+      }
+      
+      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.io.ProtoValue)
+    }
+    
+    static {
+      defaultInstance = new ProtoValue(true);
+      org.apache.hadoop.io.ProtoTest.internalForceInit();
+      defaultInstance.initFields();
+    }
+    
+    // @@protoc_insertion_point(class_scope:org.apache.hadoop.io.ProtoValue)
+  }
+  
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_hadoop_io_ProtoKey_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_hadoop_io_ProtoKey_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_hadoop_io_ProtoValue_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_hadoop_io_ProtoValue_fieldAccessorTable;
+  
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n2src/test/core/org/apache/hadoop/io/Pro" +
+      "toTest.proto\022\024org.apache.hadoop.io\"\031\n\010Pr" +
+      "otoKey\022\r\n\005value\030\001 \002(\005\"\033\n\nProtoValue\022\r\n\005v" +
+      "alue\030\002 \002(\tB\002H\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_org_apache_hadoop_io_ProtoKey_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_org_apache_hadoop_io_ProtoKey_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_hadoop_io_ProtoKey_descriptor,
+              new java.lang.String[] { "Value", },
+              org.apache.hadoop.io.ProtoTest.ProtoKey.class,
+              org.apache.hadoop.io.ProtoTest.ProtoKey.Builder.class);
+          internal_static_org_apache_hadoop_io_ProtoValue_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+          internal_static_org_apache_hadoop_io_ProtoValue_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_hadoop_io_ProtoValue_descriptor,
+              new java.lang.String[] { "Value", },
+              org.apache.hadoop.io.ProtoTest.ProtoValue.class,
+              org.apache.hadoop.io.ProtoTest.ProtoValue.Builder.class);
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+  
+  public static void internalForceInit() {}
+  
+  // @@protoc_insertion_point(outer_class_scope)
+}

Added: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ProtoTest.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ProtoTest.proto?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ProtoTest.proto (added)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ProtoTest.proto Sat Dec  4 07:13:10 2010
@@ -0,0 +1,11 @@
+package org.apache.hadoop.io;
+
+option optimize_for = SPEED;
+
+message ProtoKey {
+  required int32 value = 1;
+}
+
+message ProtoValue {
+  required string value = 2;
+}

Modified: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/RandomDatum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/RandomDatum.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/RandomDatum.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/RandomDatum.java Sat Dec  4 07:13:10 2010
@@ -39,7 +39,7 @@ public class RandomDatum implements Writ
   
   public void write(DataOutput out) throws IOException {
     out.writeInt(length);
-    out.write(data);
+    out.write(data, 0, length);
   }
 
   public void readFields(DataInput in) throws IOException {

Modified: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestDefaultStringifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestDefaultStringifier.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestDefaultStringifier.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestDefaultStringifier.java Sat Dec  4 07:13:10 2010
@@ -26,6 +26,9 @@ import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.io.serial.lib.JavaSerialization;
+import org.apache.hadoop.io.serial.lib.WritableSerialization;
 
 public class TestDefaultStringifier extends TestCase {
 
@@ -36,7 +39,8 @@ public class TestDefaultStringifier exte
 
   public void testWithWritable() throws Exception {
 
-    conf.set("io.serializations", "org.apache.hadoop.io.serializer.WritableSerialization");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SERIALIZATIONS_KEY, 
+             WritableSerialization.class.getName());
 
     LOG.info("Testing DefaultStringifier with Text");
 
@@ -51,7 +55,8 @@ public class TestDefaultStringifier exte
         builder.append(alphabet[random.nextInt(alphabet.length)]);
       }
       Text text = new Text(builder.toString());
-      DefaultStringifier<Text> stringifier = new DefaultStringifier<Text>(conf, Text.class);
+      DefaultStringifier<Text> stringifier = 
+        new DefaultStringifier<Text>(conf, Text.class);
 
       String str = stringifier.toString(text);
       Text claimedText = stringifier.fromString(str);
@@ -62,13 +67,15 @@ public class TestDefaultStringifier exte
   }
 
   public void testWithJavaSerialization() throws Exception {
-    conf.set("io.serializations", "org.apache.hadoop.io.serializer.JavaSerialization");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SERIALIZATIONS_KEY, 
+             JavaSerialization.class.getName());
 
     LOG.info("Testing DefaultStringifier with Serializable Integer");
 
     //Integer implements Serializable
     Integer testInt = Integer.valueOf(42);
-    DefaultStringifier<Integer> stringifier = new DefaultStringifier<Integer>(conf, Integer.class);
+    DefaultStringifier<Integer> stringifier = 
+      new DefaultStringifier<Integer>(conf, Integer.class);
 
     String str = stringifier.toString(testInt);
     Integer claimedInt = stringifier.fromString(str);
@@ -80,7 +87,8 @@ public class TestDefaultStringifier exte
   public void testStoreLoad() throws IOException {
 
     LOG.info("Testing DefaultStringifier#store() and #load()");
-    conf.set("io.serializations", "org.apache.hadoop.io.serializer.WritableSerialization");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SERIALIZATIONS_KEY, 
+             WritableSerialization.class.getName());
     Text text = new Text("uninteresting test string");
     String keyName = "test.defaultstringifier.key1";
 
@@ -94,7 +102,8 @@ public class TestDefaultStringifier exte
 
   public void testStoreLoadArray() throws IOException {
     LOG.info("Testing DefaultStringifier#storeArray() and #loadArray()");
-    conf.set("io.serializations", "org.apache.hadoop.io.serializer.JavaSerialization");
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SERIALIZATIONS_KEY, 
+             JavaSerialization.class.getName());
 
     String keyName = "test.defaultstringifier.key2";
 

Modified: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestMapFile.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestMapFile.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestMapFile.java Sat Dec  4 07:13:10 2010
@@ -36,10 +36,13 @@ public class TestMapFile extends TestCas
       getName() + ".mapfile"); 
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(dirName);
+    fs.delete(qualifiedDirName, true);
     // Make an index entry for every third insertion.
     MapFile.Writer.setIndexInterval(conf, 3);
-    MapFile.Writer writer = new MapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), Text.class, Text.class);
+    MapFile.Writer writer = 
+      new MapFile.Writer(conf, qualifiedDirName, 
+                         MapFile.Writer.keyClass(Text.class), 
+                         MapFile.Writer.valueClass(Text.class));
     // Assert that the index interval is 1
     assertEquals(3, writer.getIndexInterval());
     // Add entries up to 100 in intervals of ten.
@@ -51,8 +54,7 @@ public class TestMapFile extends TestCas
     }
     writer.close();
     // Now do getClosest on created mapfile.
-    MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
-      conf);
+    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
     Text key = new Text("55");
     Text value = new Text();
     Text closest = (Text)reader.getClosest(key, value);
@@ -94,14 +96,16 @@ public class TestMapFile extends TestCas
       getName() + ".mapfile"); 
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(dirName);
+    fs.delete(qualifiedDirName, true);
  
-    MapFile.Writer writer = new MapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
+    MapFile.Writer writer = 
+      new MapFile.Writer(conf, qualifiedDirName, 
+                         MapFile.Writer.keyClass(IntWritable.class), 
+                         MapFile.Writer.valueClass(IntWritable.class));
     writer.append(new IntWritable(1), new IntWritable(1));
     writer.close();
     // Now do getClosest on created mapfile.
-    MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
-      conf);
+    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
     assertEquals(new IntWritable(1), reader.midKey());
   }
 
@@ -112,13 +116,15 @@ public class TestMapFile extends TestCas
       getName() + ".mapfile"); 
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(dirName);
+    fs.delete(qualifiedDirName, true);
  
-    MapFile.Writer writer = new MapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
+    MapFile.Writer writer =
+      new MapFile.Writer(conf, qualifiedDirName, 
+                         MapFile.Writer.keyClass(IntWritable.class), 
+                         MapFile.Writer.valueClass(IntWritable.class));
     writer.close();
     // Now do midKey on created mapfile.
-    MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
-      conf);
+    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
     assertEquals(null, reader.midKey());
   }
 }
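The change above moves TestMapFile from the old (fs, String, keyClass, valueClass) constructors to the option-based MapFile API used elsewhere on this branch. A minimal standalone sketch of that usage, with a hypothetical local path, might look like this:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public class MapFileOptionSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path dir = fs.makeQualified(new Path("/tmp/example.mapfile"));  // hypothetical path
        fs.delete(dir, true);  // mirror the test: clear any leftovers before writing
        MapFile.Writer writer =
            new MapFile.Writer(conf, dir,
                               MapFile.Writer.keyClass(Text.class),
                               MapFile.Writer.valueClass(Text.class));
        for (int i = 0; i < 10; i++) {
          // MapFile keys must be appended in sorted order.
          writer.append(new Text(String.format("%02d", i)), new Text("value-" + i));
        }
        writer.close();
        MapFile.Reader reader = new MapFile.Reader(dir, conf);
        Text value = new Text();
        reader.get(new Text("05"), value);  // indexed point lookup
        System.out.println(value);
        reader.close();
      }
    }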

Modified: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestSequenceFileSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestSequenceFileSerialization.java?rev=1042107&r1=1042106&r2=1042107&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestSequenceFileSerialization.java (original)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/TestSequenceFileSerialization.java Sat Dec  4 07:13:10 2010
@@ -20,50 +20,314 @@ package org.apache.hadoop.io;
 
 import junit.framework.TestCase;
 
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.util.Utf8;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.ProtoTest.ProtoKey;
+import org.apache.hadoop.io.ProtoTest.ProtoValue;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.SequenceFile.Reader;
 import org.apache.hadoop.io.SequenceFile.Writer;
+import org.apache.hadoop.io.serial.Serialization;
+import org.apache.hadoop.io.serial.SerializationFactory;
+import org.apache.hadoop.io.serial.lib.CompatibilitySerialization;
+import org.apache.hadoop.io.serial.lib.JavaSerialization;
+import org.apache.hadoop.io.serial.lib.avro.AvroSerialization;
+import org.apache.hadoop.io.serial.lib.avro.AvroSerialization.Kind;
 
 public class TestSequenceFileSerialization extends TestCase {
   
   private Configuration conf;
   private FileSystem fs;
-  
+  private Path file;
+
   @Override
   protected void setUp() throws Exception {
     conf = new Configuration();
-    conf.set("io.serializations",
-        "org.apache.hadoop.io.serializer.JavaSerialization");
     fs = FileSystem.getLocal(conf);  
+    file = new Path(System.getProperty("test.build.data",".") + "/test.seq");
   }
   
   @Override
   protected void tearDown() throws Exception {
     fs.close();
   }
-  
-  public void testJavaSerialization() throws Exception {
-    Path file = new Path(System.getProperty("test.build.data",".") +
-        "/test.seq");
+
+  public void writeAvroSpecificSerialization(CompressionType kind
+                                             ) throws Exception {
+    AvroKey key = new AvroKey();
+    AvroValue value = new AvroValue();
+    fs.delete(file, true);
+    Writer writer = 
+      SequenceFile.createWriter(conf,
+                                SequenceFile.Writer.file(file),
+                                SequenceFile.Writer.compression(kind),
+                                SequenceFile.Writer.keyClass(AvroKey.class),
+                                SequenceFile.Writer.valueClass(AvroValue.class));
+    key.value = 1;
+    value.value = new Utf8("one");
+    writer.append(key, value);
+    key.value = 2;
+    value.value = new Utf8("two");
+    writer.append(key, value);
+    String writerKeySerialStr = writer.getKeySerialization().toString();
+    String writerValueSerialStr = writer.getValueSerialization().toString();
+    writer.close();
+
+    assertEquals("{schema: '{\"type\":\"record\",\"name\":\"AvroKey\"," +
+                 "\"namespace\":\"org.apache.hadoop.io\",\"fields\":[{" +
+                 "\"name\":\"value\",\"type\":\"int\"}]}',\n" +
+                 "  kind: SPECIFIC}\n", 
+                 writerKeySerialStr);
+    assertEquals("{schema: '{\"type\":\"record\",\"name\":\"AvroValue\"," +
+                 "\"namespace\":\"org.apache.hadoop.io\",\"fields\":[{" +
+                 "\"name\":\"value\",\"type\":\"string\"}]}',\n" +
+                 "  kind: SPECIFIC}\n", 
+                 writerValueSerialStr);
     
+    SerializationFactory factory = SerializationFactory.getInstance(conf);
+    Serialization<?> keySerialClone = factory.getSerialization("avro");
+    keySerialClone.fromString(writerKeySerialStr);
+    Serialization<?> valueSerialClone = factory.getSerialization("avro");
+    valueSerialClone.fromString(writerValueSerialStr);
+
+    Reader reader = new Reader(conf, SequenceFile.Reader.file(file));
+    Serialization<?> keySerial = reader.getKeySerialization();
+    Serialization<?> valueSerial = reader.getValueSerialization();
+    assertEquals(kind, reader.getCompressionType());
+    assertEquals("avro", keySerial.getName());
+    assertEquals(writerKeySerialStr, keySerial.toString());
+    assertEquals(keySerialClone, keySerial);
+    assertEquals("avro", valueSerial.getName());
+    assertEquals(writerValueSerialStr, valueSerial.toString());
+    assertEquals(valueSerialClone, valueSerial);
+
+    assertEquals(1, ((AvroKey) reader.nextKey(key)).value);
+    assertEquals(new Utf8("one"), 
+                 ((AvroValue) reader.getCurrentValue(value)).value);
+    assertEquals(2, ((AvroKey) reader.nextKey(key)).value);
+    assertEquals(new Utf8("two"), 
+                 ((AvroValue) reader.getCurrentValue(value)).value);
+    assertNull(reader.nextKey(null));
+    reader.close();    
+  }
+
+  public void readAvroGenericSerialization() throws Exception {
+    Serialization<?> serial = new AvroSerialization(Kind.GENERIC);
+    Reader reader = new Reader(conf, SequenceFile.Reader.file(file),
+                               SequenceFile.Reader.keySerialization(serial),
+                               SequenceFile.Reader.valueSerialization(serial.clone()));
+    
+    assertEquals(1, ((GenericRecord) reader.nextKey(null)).get("value"));
+    assertEquals(new Utf8("one"), 
+                 ((GenericRecord) reader.getCurrentValue(null)).get("value"));
+    assertEquals(2, ((GenericRecord) reader.nextKey(null)).get("value"));
+    assertEquals(new Utf8("two"), 
+                 ((GenericRecord) reader.getCurrentValue(null)).get("value"));
+    assertNull(reader.nextKey(null));
+    reader.close();        
+  }
+
+  public void writeProtobufSerialization(CompressionType kind
+                                         ) throws Exception {
     fs.delete(file, true);
-    Writer writer = SequenceFile.createWriter(fs, conf, file, Long.class,
-        String.class);
+    Writer writer = 
+      SequenceFile.createWriter(conf,
+                                SequenceFile.Writer.file(file),
+                                SequenceFile.Writer.compression(kind),
+                                SequenceFile.Writer.keyClass(ProtoKey.class),
+                                SequenceFile.Writer.valueClass(ProtoValue.class));
+    writer.append(ProtoKey.newBuilder().setValue(1).build(), 
+                  ProtoValue.newBuilder().setValue("one").build());
+    writer.append(ProtoKey.newBuilder().setValue(2).build(), 
+                  ProtoValue.newBuilder().setValue("two").build());
+    String keySerialStr = writer.getKeySerialization().toString();
+    assertEquals("{class: org.apache.hadoop.io.ProtoTest$ProtoKey}\n", 
+                 keySerialStr);
+    String valueSerialStr = writer.getValueSerialization().toString();
+    assertEquals("{class: org.apache.hadoop.io.ProtoTest$ProtoValue}\n", 
+                 valueSerialStr);
+    writer.close();
+
+    // build serializers from the string form
+    SerializationFactory factory = SerializationFactory.getInstance(conf);
+    Serialization<?> keySerial = factory.getSerialization("protobuf");
+    keySerial.fromString(keySerialStr);
+    Serialization<?> valueSerial = factory.getSerialization("protobuf");
+    valueSerial.fromString(valueSerialStr);
+
+    Reader reader = new Reader(conf, SequenceFile.Reader.file(file));
+    assertEquals(kind, reader.getCompressionType());
+    Serialization<?> readerKeySerial = reader.getKeySerialization();
+    Serialization<?> readerValueSerial = reader.getValueSerialization();
+    assertEquals("protobuf", readerKeySerial.getName());
+    assertEquals(keySerialStr, readerKeySerial.toString());
+    assertEquals(keySerial, readerKeySerial);
+    assertEquals("protobuf", readerValueSerial.getName());
+    assertEquals(valueSerialStr, readerValueSerial.toString());
+    assertEquals(valueSerial, readerValueSerial);
+
+    assertEquals(ProtoKey.newBuilder().setValue(1).build(), 
+                 reader.nextKey(null));
+    assertEquals(ProtoValue.newBuilder().setValue("one").build(), 
+                 reader.getCurrentValue(null));
+    assertEquals(ProtoKey.newBuilder().setValue(2).build(), 
+                 reader.nextKey(null));
+    assertEquals(ProtoValue.newBuilder().setValue("two").build(), 
+                 reader.getCurrentValue(null));
+    assertNull(reader.nextKey(null));
+    reader.close();    
+  }
+
+  public void writeThriftSerialization(CompressionType kind) throws Exception {
+    fs.delete(file, true);
+    Writer writer = 
+      SequenceFile.createWriter(conf,
+                                SequenceFile.Writer.file(file),
+                                SequenceFile.Writer.compression(kind),
+                                SequenceFile.Writer.keyClass(ThriftKey.class),
+                                SequenceFile.Writer.valueClass(ThriftValue.class));
+    writer.append(new ThriftKey(1), new ThriftValue("one"));
+    writer.append(new ThriftKey(2), new ThriftValue("two"));
+    writer.close();
+
+    Reader reader = new Reader(conf, SequenceFile.Reader.file(file));
+    assertEquals(kind, reader.getCompressionType());
+    assertEquals("thrift", reader.getKeySerialization().getName());
+    assertEquals("thrift", reader.getValueSerialization().getName());
+    assertEquals(new ThriftKey(1), reader.nextKey(null));
+    assertEquals(new ThriftValue("one"), reader.getCurrentValue(null));
+    assertEquals(new ThriftKey(2), reader.nextKey(null));
+    assertEquals(new ThriftValue("two"), reader.getCurrentValue(null));
+    assertNull(reader.nextKey(null));
+    reader.close();    
+  }
+  
+  public void writeWritableSerialization(CompressionType kind
+                                         ) throws Exception {
+    fs.delete(file, true);
+    Writer writer = 
+      SequenceFile.createWriter(conf,
+                                SequenceFile.Writer.file(file),
+                                SequenceFile.Writer.compression(kind),
+                                SequenceFile.Writer.keyClass(IntWritable.class),
+                                SequenceFile.Writer.valueClass(Text.class));
+    writer.append(new IntWritable(1), new Text("one"));
+    writer.append(new IntWritable(2), new Text("two"));
+    writer.close();
+
+    Reader reader = new Reader(conf, SequenceFile.Reader.file(file));
+    assertEquals(kind, reader.getCompressionType());
+    assertEquals("writable", reader.getKeySerialization().getName());
+    assertEquals("writable", reader.getValueSerialization().getName());
+    assertEquals(new IntWritable(1), reader.nextKey(null));
+    assertEquals(new Text("one"), reader.getCurrentValue(null));
+    assertEquals(new IntWritable(2), reader.nextKey(null));
+    assertEquals(new Text("two"), reader.getCurrentValue(null));
+    assertNull(reader.nextKey(null));
+    reader.close();    
+  }
+
+  public void writeJavaSerialization(CompressionType kind) throws Exception {
+    fs.delete(file, true);
+    conf.set(CommonConfigurationKeysPublic.HADOOP_SERIALIZATIONS_KEY,
+             JavaSerialization.class.getName());
     
+    Writer writer = 
+      SequenceFile.createWriter(conf,
+                                SequenceFile.Writer.file(file),
+                                SequenceFile.Writer.compression(kind),
+                                SequenceFile.Writer.keyClass(Long.class),
+                                SequenceFile.Writer.valueClass(String.class));
     writer.append(1L, "one");
     writer.append(2L, "two");
-    
     writer.close();
     
-    Reader reader = new Reader(fs, file, conf);
-    assertEquals(1L, reader.next((Object) null));
+    Reader reader = new Reader(conf, SequenceFile.Reader.file(file));
+    assertEquals(kind, reader.getCompressionType());
+    assertEquals("java", reader.getKeySerialization().getName());
+    assertEquals("java", reader.getValueSerialization().getName());
+    assertEquals(1L, reader.nextKey(null));
     assertEquals("one", reader.getCurrentValue((Object) null));
-    assertEquals(2L, reader.next((Object) null));
+    assertEquals(2L, reader.nextKey(null));
     assertEquals("two", reader.getCurrentValue((Object) null));
-    assertNull(reader.next((Object) null));
+    assertNull(reader.nextKey(null));
     reader.close();
     
   }
+
+  /**
+   * Test the compatibility layer to load the old java serialization.
+   */
+  public void writeOldJavaSerialization(CompressionType kind
+                                        ) throws Exception {
+    fs.delete(file, true);
+    // set the old attribute to include the java serialization
+    conf.set("io.serializations",
+             "org.apache.hadoop.io.serializer.JavaSerialization");
+    SerializationFactory factory = SerializationFactory.getInstance(conf);
+    Serialization<?> serial = factory.getSerializationByType(Long.class);
+    assertEquals(CompatibilitySerialization.class, serial.getClass());
+    
+    Writer writer = 
+      SequenceFile.createWriter(conf,
+                                SequenceFile.Writer.file(file),
+                                SequenceFile.Writer.compression(kind),
+                                SequenceFile.Writer.keyClass(Long.class),
+                                SequenceFile.Writer.valueClass(String.class));
+    writer.append(1L, "one");
+    writer.append(2L, "two");
+    writer.close();
+    
+    Reader reader = new Reader(conf, SequenceFile.Reader.file(file));
+    assertEquals("compatibility", reader.getKeySerialization().getName());
+    assertEquals("compatibility", reader.getValueSerialization().getName());
+    assertEquals(kind, reader.getCompressionType());
+    assertEquals(1L, reader.nextKey(null));
+    assertEquals("one", reader.getCurrentValue((Object) null));
+    assertEquals(2L, reader.nextKey(null));
+    assertEquals("two", reader.getCurrentValue((Object) null));
+    assertNull(reader.nextKey(null));
+    reader.close();
+  }
+  
+  public void testAvro() throws Exception {
+    writeAvroSpecificSerialization(CompressionType.NONE);
+    readAvroGenericSerialization();
+    writeAvroSpecificSerialization(CompressionType.RECORD);
+    writeAvroSpecificSerialization(CompressionType.BLOCK);
+  }
+
+  public void testProtobuf() throws Exception {
+    writeProtobufSerialization(CompressionType.NONE);
+    writeProtobufSerialization(CompressionType.RECORD);
+    writeProtobufSerialization(CompressionType.BLOCK);
+  }
+
+  public void testThrift() throws Exception {
+    writeThriftSerialization(CompressionType.NONE);
+    writeThriftSerialization(CompressionType.RECORD);
+    writeThriftSerialization(CompressionType.BLOCK);
+  }
+
+  public void testWritable() throws Exception {
+    writeWritableSerialization(CompressionType.NONE);
+    writeWritableSerialization(CompressionType.RECORD);
+    writeWritableSerialization(CompressionType.BLOCK);
+  }
+
+  public void testJava() throws Exception {
+    writeJavaSerialization(CompressionType.NONE);
+    writeJavaSerialization(CompressionType.RECORD);
+    writeJavaSerialization(CompressionType.BLOCK);
+  }
+
+  public void testOldJava() throws Exception {
+    writeOldJavaSerialization(CompressionType.NONE);
+    writeOldJavaSerialization(CompressionType.RECORD);
+    writeOldJavaSerialization(CompressionType.BLOCK);
+  }
 }
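The rewritten TestSequenceFileSerialization exercises the option-based SequenceFile writer and reader plus the branch's per-file serialization metadata (getKeySerialization, nextKey). A rough sketch of the writable round trip those tests cover, again with a hypothetical path and assuming the branch-only Reader methods shown above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.SequenceFile.CompressionType;
    import org.apache.hadoop.io.Text;

    public class SequenceFileOptionSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path file = new Path("/tmp/example.seq");  // hypothetical path
        fs.delete(file, true);
        SequenceFile.Writer writer =
            SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(file),
                SequenceFile.Writer.compression(CompressionType.BLOCK),
                SequenceFile.Writer.keyClass(IntWritable.class),
                SequenceFile.Writer.valueClass(Text.class));
        writer.append(new IntWritable(1), new Text("one"));
        writer.append(new IntWritable(2), new Text("two"));
        writer.close();

        SequenceFile.Reader reader =
            new SequenceFile.Reader(conf, SequenceFile.Reader.file(file));
        // getKeySerialization()/nextKey() are additions on this branch (see the test above).
        System.out.println("key serialization: " + reader.getKeySerialization().getName());
        IntWritable key = new IntWritable();
        Text value = new Text();
        while (reader.nextKey(key) != null) {
          reader.getCurrentValue(value);
          System.out.println(key + " -> " + value);
        }
        reader.close();
      }
    }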

Added: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ThriftKey.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ThriftKey.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ThriftKey.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ThriftKey.java Sat Dec  4 07:13:10 2010
@@ -0,0 +1,307 @@
+/**
+ * Autogenerated by Thrift
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ */
+package org.apache.hadoop.io;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.thrift.*;
+import org.apache.thrift.async.*;
+import org.apache.thrift.meta_data.*;
+import org.apache.thrift.transport.*;
+import org.apache.thrift.protocol.*;
+
+public class ThriftKey implements TBase<ThriftKey, ThriftKey._Fields>, java.io.Serializable, Cloneable {
+  private static final TStruct STRUCT_DESC = new TStruct("ThriftKey");
+
+  private static final TField VALUE_FIELD_DESC = new TField("value", TType.I32, (short)1);
+
+  public int value;
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements TFieldIdEnum {
+    VALUE((short)1, "value");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // VALUE
+          return VALUE;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  private static final int __VALUE_ISSET_ID = 0;
+  private BitSet __isset_bit_vector = new BitSet(1);
+
+  public static final Map<_Fields, FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, FieldMetaData> tmpMap = new EnumMap<_Fields, FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.VALUE, new FieldMetaData("value", TFieldRequirementType.DEFAULT, 
+        new FieldValueMetaData(TType.I32)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    FieldMetaData.addStructMetaDataMap(ThriftKey.class, metaDataMap);
+  }
+
+  public ThriftKey() {
+  }
+
+  public ThriftKey(
+    int value)
+  {
+    this();
+    this.value = value;
+    setValueIsSet(true);
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public ThriftKey(ThriftKey other) {
+    __isset_bit_vector.clear();
+    __isset_bit_vector.or(other.__isset_bit_vector);
+    this.value = other.value;
+  }
+
+  public ThriftKey deepCopy() {
+    return new ThriftKey(this);
+  }
+
+  @Override
+  public void clear() {
+    setValueIsSet(false);
+    this.value = 0;
+  }
+
+  public int getValue() {
+    return this.value;
+  }
+
+  public ThriftKey setValue(int value) {
+    this.value = value;
+    setValueIsSet(true);
+    return this;
+  }
+
+  public void unsetValue() {
+    __isset_bit_vector.clear(__VALUE_ISSET_ID);
+  }
+
+  /** Returns true if field value is set (has been assigned a value) and false otherwise */
+  public boolean isSetValue() {
+    return __isset_bit_vector.get(__VALUE_ISSET_ID);
+  }
+
+  public void setValueIsSet(boolean value) {
+    __isset_bit_vector.set(__VALUE_ISSET_ID, value);
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case VALUE:
+      if (value == null) {
+        unsetValue();
+      } else {
+        setValue((Integer)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case VALUE:
+      return new Integer(getValue());
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case VALUE:
+      return isSetValue();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof ThriftKey)
+      return this.equals((ThriftKey)that);
+    return false;
+  }
+
+  public boolean equals(ThriftKey that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_value = true;
+    boolean that_present_value = true;
+    if (this_present_value || that_present_value) {
+      if (!(this_present_value && that_present_value))
+        return false;
+      if (this.value != that.value)
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  public int compareTo(ThriftKey other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    ThriftKey typedOther = (ThriftKey)other;
+
+    lastComparison = Boolean.valueOf(isSetValue()).compareTo(typedOther.isSetValue());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetValue()) {
+      lastComparison = TBaseHelper.compareTo(this.value, typedOther.value);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(TProtocol iprot) throws TException {
+    TField field;
+    iprot.readStructBegin();
+    while (true)
+    {
+      field = iprot.readFieldBegin();
+      if (field.type == TType.STOP) { 
+        break;
+      }
+      switch (field.id) {
+        case 1: // VALUE
+          if (field.type == TType.I32) {
+            this.value = iprot.readI32();
+            setValueIsSet(true);
+          } else { 
+            TProtocolUtil.skip(iprot, field.type);
+          }
+          break;
+        default:
+          TProtocolUtil.skip(iprot, field.type);
+      }
+      iprot.readFieldEnd();
+    }
+    iprot.readStructEnd();
+
+    // check for required fields of primitive type, which can't be checked in the validate method
+    validate();
+  }
+
+  public void write(TProtocol oprot) throws TException {
+    validate();
+
+    oprot.writeStructBegin(STRUCT_DESC);
+    oprot.writeFieldBegin(VALUE_FIELD_DESC);
+    oprot.writeI32(this.value);
+    oprot.writeFieldEnd();
+    oprot.writeFieldStop();
+    oprot.writeStructEnd();
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("ThriftKey(");
+    boolean first = true;
+
+    sb.append("value:");
+    sb.append(this.value);
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws TException {
+    // check for required fields
+  }
+
+}
+

Added: hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ThriftValue.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ThriftValue.java?rev=1042107&view=auto
==============================================================================
--- hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ThriftValue.java (added)
+++ hadoop/common/branches/HADOOP-6685/src/test/core/org/apache/hadoop/io/ThriftValue.java Sat Dec  4 07:13:10 2010
@@ -0,0 +1,309 @@
+/**
+ * Autogenerated by Thrift
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ */
+package org.apache.hadoop.io;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.thrift.*;
+import org.apache.thrift.async.*;
+import org.apache.thrift.meta_data.*;
+import org.apache.thrift.transport.*;
+import org.apache.thrift.protocol.*;
+
+public class ThriftValue implements TBase<ThriftValue, ThriftValue._Fields>, java.io.Serializable, Cloneable {
+  private static final TStruct STRUCT_DESC = new TStruct("ThriftValue");
+
+  private static final TField VALUE_FIELD_DESC = new TField("value", TType.STRING, (short)1);
+
+  public String value;
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements TFieldIdEnum {
+    VALUE((short)1, "value");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // VALUE
+          return VALUE;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+
+  public static final Map<_Fields, FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, FieldMetaData> tmpMap = new EnumMap<_Fields, FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.VALUE, new FieldMetaData("value", TFieldRequirementType.DEFAULT, 
+        new FieldValueMetaData(TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    FieldMetaData.addStructMetaDataMap(ThriftValue.class, metaDataMap);
+  }
+
+  public ThriftValue() {
+  }
+
+  public ThriftValue(
+    String value)
+  {
+    this();
+    this.value = value;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public ThriftValue(ThriftValue other) {
+    if (other.isSetValue()) {
+      this.value = other.value;
+    }
+  }
+
+  public ThriftValue deepCopy() {
+    return new ThriftValue(this);
+  }
+
+  @Override
+  public void clear() {
+    this.value = null;
+  }
+
+  public String getValue() {
+    return this.value;
+  }
+
+  public ThriftValue setValue(String value) {
+    this.value = value;
+    return this;
+  }
+
+  public void unsetValue() {
+    this.value = null;
+  }
+
+  /** Returns true if field value is set (has been assigned a value) and false otherwise */
+  public boolean isSetValue() {
+    return this.value != null;
+  }
+
+  public void setValueIsSet(boolean value) {
+    if (!value) {
+      this.value = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case VALUE:
+      if (value == null) {
+        unsetValue();
+      } else {
+        setValue((String)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case VALUE:
+      return getValue();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case VALUE:
+      return isSetValue();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof ThriftValue)
+      return this.equals((ThriftValue)that);
+    return false;
+  }
+
+  public boolean equals(ThriftValue that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_value = true && this.isSetValue();
+    boolean that_present_value = true && that.isSetValue();
+    if (this_present_value || that_present_value) {
+      if (!(this_present_value && that_present_value))
+        return false;
+      if (!this.value.equals(that.value))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  public int compareTo(ThriftValue other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    ThriftValue typedOther = (ThriftValue)other;
+
+    lastComparison = Boolean.valueOf(isSetValue()).compareTo(typedOther.isSetValue());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetValue()) {
+      lastComparison = TBaseHelper.compareTo(this.value, typedOther.value);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(TProtocol iprot) throws TException {
+    TField field;
+    iprot.readStructBegin();
+    while (true)
+    {
+      field = iprot.readFieldBegin();
+      if (field.type == TType.STOP) { 
+        break;
+      }
+      switch (field.id) {
+        case 1: // VALUE
+          if (field.type == TType.STRING) {
+            this.value = iprot.readString();
+          } else { 
+            TProtocolUtil.skip(iprot, field.type);
+          }
+          break;
+        default:
+          TProtocolUtil.skip(iprot, field.type);
+      }
+      iprot.readFieldEnd();
+    }
+    iprot.readStructEnd();
+
+    // check for required fields of primitive type, which can't be checked in the validate method
+    validate();
+  }
+
+  public void write(TProtocol oprot) throws TException {
+    validate();
+
+    oprot.writeStructBegin(STRUCT_DESC);
+    if (this.value != null) {
+      oprot.writeFieldBegin(VALUE_FIELD_DESC);
+      oprot.writeString(this.value);
+      oprot.writeFieldEnd();
+    }
+    oprot.writeFieldStop();
+    oprot.writeStructEnd();
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("ThriftValue(");
+    boolean first = true;
+
+    sb.append("value:");
+    if (this.value == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.value);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws TException {
+    // check for required fields
+  }
+
+}
+