Posted to common-commits@hadoop.apache.org by om...@apache.org on 2008/09/16 01:55:56 UTC

svn commit: r695690 - in /hadoop/core/trunk: ./ src/contrib/streaming/src/java/org/apache/hadoop/streaming/ src/core/org/apache/hadoop/io/ src/examples/org/apache/hadoop/examples/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/mapred/org/apache/h...

Author: omalley
Date: Mon Sep 15 16:55:55 2008
New Revision: 695690

URL: http://svn.apache.org/viewvc?rev=695690&view=rev
Log:
HADOOP-4151. Add a byte-comparable interface that both Text and 
BytesWritable implement. (cdouglas via omalley)
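
For orientation, a minimal sketch of what the shared supertype enables once both
types extend it (hypothetical demo class, not part of this commit; assumes the
BinaryComparable API added below):

    import org.apache.hadoop.io.BinaryComparable;
    import org.apache.hadoop.io.BytesWritable;
    import org.apache.hadoop.io.Text;

    public class ByteCompareDemo {
      public static void main(String[] args) throws Exception {
        Text t = new Text("hadoop");
        BytesWritable b = new BytesWritable("hadoop".getBytes("UTF-8"));
        // Both keys can now be ordered through the common supertype,
        // using a memcmp-style comparison of their raw bytes.
        BinaryComparable left = t;
        BinaryComparable right = b;
        System.out.println(left.compareTo(right)); // 0: identical byte sequences
        // equals() on the subclasses still checks the concrete type, so
        // t.equals(b) remains false even though the bytes match.
      }
    }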

Added:
    hadoop/core/trunk/src/core/org/apache/hadoop/io/BinaryComparable.java
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
    hadoop/core/trunk/src/core/org/apache/hadoop/io/BytesWritable.java
    hadoop/core/trunk/src/core/org/apache/hadoop/io/Text.java
    hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StringBytesWritable.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFilter.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/BinaryProtocol.java
    hadoop/core/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/SortValidator.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Mon Sep 15 16:55:55 2008
@@ -307,6 +307,9 @@
     HADOOP-4147. Remove unused class JobWithTaskContext from class
     JobInProgress. (Amareshwari Sriramadasu via johan)
 
+    HADOOP-4151. Add a byte-comparable interface that both Text and 
+    BytesWritable implement. (cdouglas via omalley)
+
   OPTIMIZATIONS
 
     HADOOP-3556. Removed lock contention in MD5Hash by changing the 

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java Mon Sep 15 16:55:55 2008
@@ -593,8 +593,8 @@
     int valSize;
     if (value instanceof BytesWritable) {
       BytesWritable val = (BytesWritable) value;
-      bval = val.get();
-      valSize = val.getSize();
+      bval = val.getBytes();
+      valSize = val.getLength();
     } else if (value instanceof Text) {
       Text val = (Text) value;
       bval = val.getBytes();

Added: hadoop/core/trunk/src/core/org/apache/hadoop/io/BinaryComparable.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/BinaryComparable.java?rev=695690&view=auto
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/io/BinaryComparable.java (added)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/io/BinaryComparable.java Mon Sep 15 16:55:55 2008
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io;
+
+/**
+ * Interface supported by {@link org.apache.hadoop.io.WritableComparable}
+ * types supporting ordering/permutation by a representative set of bytes.
+ */
+public abstract class BinaryComparable implements Comparable<BinaryComparable> {
+
+  /**
+   * Return n st bytes 0..n-1 from {#getBytes()} are valid.
+   */
+  public abstract int getLength();
+
+  /**
+   * Return representative byte array for this instance.
+   */
+  public abstract byte[] getBytes();
+
+  /**
+   * Compare bytes from {#getBytes()}.
+   * @see org.apache.hadoop.io.WritableComparator#compareBytes(byte[],int,int,byte[],int,int)
+   */
+  public int compareTo(BinaryComparable other) {
+    if (this == other)
+      return 0;
+    return WritableComparator.compareBytes(getBytes(), 0, getLength(),
+             other.getBytes(), 0, other.getLength());
+  }
+
+  /**
+   * Compare bytes from {#getBytes()} to those provided.
+   */
+  public int compareTo(byte[] other, int off, int len) {
+    return WritableComparator.compareBytes(getBytes(), 0, getLength(),
+             other, off, len);
+  }
+
+  /**
+   * Return true if bytes from {#getBytes()} match.
+   */
+  public boolean equals(Object other) {
+    if (!(other instanceof BinaryComparable))
+      return false;
+    BinaryComparable that = (BinaryComparable)other;
+    if (this.getLength() != that.getLength())
+      return false;
+    return this.compareTo(that) == 0;
+  }
+
+  /**
+   * Return a hash of the bytes returned from {#getBytes()}.
+   * @see org.apache.hadoop.io.WritableComparator#hashBytes(byte[],int)
+   */
+  public int hashCode() {
+    return WritableComparator.hashBytes(getBytes(), getLength());
+  }
+
+}
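
A hedged sketch of how a user-defined key could build on this class (hypothetical
type, not part of this commit): implement only getBytes()/getLength() plus the
Writable methods, and compareTo, equals, and hashCode are inherited from
BinaryComparable.

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.BinaryComparable;
    import org.apache.hadoop.io.WritableComparable;

    public class FixedKey extends BinaryComparable
        implements WritableComparable<BinaryComparable> {
      private final byte[] key = new byte[8];  // fixed 8-byte key

      public byte[] getBytes() { return key; }
      public int getLength() { return key.length; }

      public void write(DataOutput out) throws IOException {
        out.write(key, 0, key.length);
      }
      public void readFields(DataInput in) throws IOException {
        in.readFully(key, 0, key.length);
      }
      // compareTo, equals, and hashCode come from BinaryComparable and
      // operate on the bytes reported by getBytes()/getLength().
    }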

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/io/BytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/BytesWritable.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/io/BytesWritable.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/io/BytesWritable.java Mon Sep 15 16:55:55 2008
@@ -28,7 +28,8 @@
  * the current capacity. The hash function is the front of the md5 of the 
  * buffer. The sort order is the same as memcmp.
  */
-public class BytesWritable implements WritableComparable {
+public class BytesWritable extends BinaryComparable
+    implements WritableComparable<BinaryComparable> {
   private static final int LENGTH_BYTES = 4;
   private static final byte[] EMPTY_BYTES = {};
 
@@ -51,18 +52,36 @@
   
   /**
    * Get the data from the BytesWritable.
-   * @return The data is only valid between 0 and getSize() - 1.
+   * @return The data is only valid between 0 and getLength() - 1.
    */
-  public byte[] get() {
+  public byte[] getBytes() {
     return bytes;
   }
-  
+
+  /**
+   * Get the data from the BytesWritable.
+   * @deprecated Use {@link #getBytes()} instead.
+   */
+  @Deprecated
+  public byte[] get() {
+    return getBytes();
+  }
+
   /**
    * Get the current size of the buffer.
    */
-  public int getSize() {
+  public int getLength() {
     return size;
   }
+
+  /**
+   * Get the current size of the buffer.
+   * @deprecated Use {@link #getLength()} instead.
+   */
+  @Deprecated
+  public int getSize() {
+    return getLength();
+  }
   
   /**
    * Change the size of the buffer. The values in the old range are preserved
@@ -138,31 +157,18 @@
   }
   
   public int hashCode() {
-    return WritableComparator.hashBytes(bytes, size);
-  }
-  
-  /**
-   * Define the sort order of the BytesWritable.
-   * @param right_obj The other bytes writable
-   * @return Positive if left is bigger than right, 0 if they are equal, and
-   *         negative if left is smaller than right.
-   */
-  public int compareTo(Object right_obj) {
-    BytesWritable right = ((BytesWritable) right_obj);
-    return WritableComparator.compareBytes(bytes, 0, size, 
-                                           right.bytes, 0, right.size);
+    return super.hashCode();
   }
-  
+
   /**
    * Are the two byte sequences equal?
    */
   public boolean equals(Object right_obj) {
-    if (right_obj instanceof BytesWritable) {
-      return compareTo(right_obj) == 0;
-    }
+    if (right_obj instanceof BytesWritable)
+      return super.equals(right_obj);
     return false;
   }
-  
+
   /**
    * Generate the stream of bytes as hex pairs separated by ' '.
    */
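
The remaining hunks in this commit follow the same caller-side migration; a
minimal sketch of the pattern (hypothetical helper, not from this commit):

    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.BytesWritable;

    public class RawValueWriter {
      // Bound every read by getLength(): getBytes() returns the backing array,
      // whose capacity may exceed the valid data.
      static void writeValue(DataOutputStream out, BytesWritable val)
          throws IOException {
        out.write(val.getBytes(), 0, val.getLength()); // was: val.get(), val.getSize()
      }
    }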

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/io/Text.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/Text.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/io/Text.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/io/Text.java Mon Sep 15 16:55:55 2008
@@ -44,7 +44,8 @@
  * byte array contains valid UTF8 code, calculating the length of an encoded
  * string.
  */
-public class Text implements WritableComparable {
+public class Text extends BinaryComparable
+    implements WritableComparable<BinaryComparable> {
   private static final Log LOG= LogFactory.getLog(Text.class);
   
   private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY =
@@ -281,33 +282,15 @@
     out.write(bytes, 0, length);
   }
 
-  /** Compare two Texts bytewise using standard UTF8 ordering. */
-  public int compareTo(Object o) {
-    Text that = (Text)o;
-    if (this == that)
-      return 0;
-    else
-      return
-        WritableComparator.compareBytes(bytes, 0, length,
-                                        that.getBytes(), 0, that.getLength());
-  }
-
   /** Returns true iff <code>o</code> is a Text with the same contents.  */
   public boolean equals(Object o) {
-    if (!(o instanceof Text))
-      return false;
-    Text that = (Text)o;
-    if (this == that)
-      return true;
-    else if (this.length != that.length)
-      return false;
-    else
-      return compareTo(o) == 0;
+    if (o instanceof Text)
+      return super.equals(o);
+    return false;
   }
 
-  /** hash function */
   public int hashCode() {
-    return WritableComparator.hashBytes(bytes, length);
+    return super.hashCode();
   }
 
   /** A WritableComparator optimized for Text keys. */
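
One downstream use the shared supertype makes possible (a hypothetical partitioner
against the mapred API of this era, not part of this commit): partition any
BinaryComparable key, whether Text or BytesWritable, by a hash of its bytes.

    import org.apache.hadoop.io.BinaryComparable;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.Partitioner;

    public class BytesHashPartitioner<V extends Writable>
        implements Partitioner<BinaryComparable, V> {
      public void configure(JobConf conf) { }
      public int getPartition(BinaryComparable key, V value, int numPartitions) {
        // hashCode() on a BinaryComparable hashes the representative bytes.
        return (key.hashCode() & Integer.MAX_VALUE) % numPartitions;
      }
    }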

Modified: hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java (original)
+++ hadoop/core/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java Mon Sep 15 16:55:55 2008
@@ -180,11 +180,11 @@
         int keyLength = minKeySize + 
           (keySizeRange != 0 ? random.nextInt(keySizeRange) : 0);
         randomKey.setSize(keyLength);
-        randomizeBytes(randomKey.get(), 0, randomKey.getSize());
+        randomizeBytes(randomKey.getBytes(), 0, randomKey.getLength());
         int valueLength = minValueSize +
           (valueSizeRange != 0 ? random.nextInt(valueSizeRange) : 0);
         randomValue.setSize(valueLength);
-        randomizeBytes(randomValue.get(), 0, randomValue.getSize());
+        randomizeBytes(randomValue.getBytes(), 0, randomValue.getLength());
         output.collect(randomKey, randomValue);
         numBytesToWrite -= keyLength + valueLength;
         reporter.incrCounter(Counters.BYTES_WRITTEN, keyLength + valueLength);

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StringBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StringBytesWritable.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StringBytesWritable.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StringBytesWritable.java Mon Sep 15 16:55:55 2008
@@ -43,7 +43,7 @@
    * Convert BytesWritable to a String.
    */
   String getString() throws IOException {
-    return new String(get(),"UTF8");
+    return new String(getBytes(),"UTF8");
   }
 
   /** {@inheritDoc} */
@@ -59,7 +59,7 @@
    * Compare to a String.
    */
   boolean equals(String str) throws IOException {
-    return WritableComparator.compareBytes(get(), 0, getSize(), 
+    return WritableComparator.compareBytes(getBytes(), 0, getLength(), 
                                    str.getBytes("UTF8"), 0, str.length()) == 0;
   }
 }

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/MapTask.java Mon Sep 15 16:55:55 2008
@@ -292,7 +292,7 @@
       throw wrap;
     }
     DataInputBuffer splitBuffer = new DataInputBuffer();
-    splitBuffer.reset(split.get(), 0, split.getSize());
+    splitBuffer.reset(split.getBytes(), 0, split.getLength());
     instantiatedSplit.readFields(splitBuffer);
     
     // if it is a file split, we can give more details

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java Mon Sep 15 16:55:55 2008
@@ -61,7 +61,7 @@
 
     public void writeUncompressedBytes(DataOutputStream outStream)
       throws IOException {
-      outStream.write(value.get(), 0, value.getSize());
+      outStream.write(value.getBytes(), 0, value.getLength());
     }
 
     public void writeCompressedBytes(DataOutputStream outStream)
@@ -71,7 +71,7 @@
                                           + "RECORD compression"); 
     }
     public int getSize(){
-      return value.getSize();
+      return value.getLength();
     }
   }
 
@@ -160,7 +160,7 @@
           throws IOException {
 
           wvaluebytes.reset(bvalue);
-          out.appendRaw(bkey.get(), 0, bkey.getSize(), wvaluebytes);
+          out.appendRaw(bkey.getBytes(), 0, bkey.getLength(), wvaluebytes);
           wvaluebytes.reset(null);
         }
 

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFilter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFilter.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFilter.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/SequenceFileInputFilter.java Mon Sep 15 16:55:55 2008
@@ -264,7 +264,7 @@
     }
         
     private long MD5Hashcode(BytesWritable key) throws DigestException {
-      return MD5Hashcode(key.get(), 0, key.getSize());
+      return MD5Hashcode(key.getBytes(), 0, key.getLength());
     }
     synchronized private long MD5Hashcode(byte[] bytes, 
                                           int start, int length) throws DigestException {

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/BinaryProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/BinaryProtocol.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/BinaryProtocol.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/BinaryProtocol.java Mon Sep 15 16:55:55 2008
@@ -333,9 +333,9 @@
       stream.write(t.getBytes(), 0, len);
     } else if (obj instanceof BytesWritable) {
       BytesWritable b = (BytesWritable) obj;
-      int len = b.getSize();
+      int len = b.getLength();
       WritableUtils.writeVInt(stream, len);
-      stream.write(b.get(), 0, len);
+      stream.write(b.getBytes(), 0, len);
     } else {
       buffer.reset();
       obj.write(buffer);

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/io/TestBytesWritable.java Mon Sep 15 16:55:55 2008
@@ -27,25 +27,25 @@
   public void testSizeChange() throws Exception {
     byte[] hadoop = "hadoop".getBytes();
     BytesWritable buf = new BytesWritable(hadoop);
-    int size = buf.getSize();
+    int size = buf.getLength();
     int orig_capacity = buf.getCapacity();
     buf.setSize(size*2);
     int new_capacity = buf.getCapacity();
-    System.arraycopy(buf.get(), 0, buf.get(), size, size);
+    System.arraycopy(buf.getBytes(), 0, buf.getBytes(), size, size);
     assertTrue(new_capacity >= size * 2);
-    assertEquals(size * 2, buf.getSize());
+    assertEquals(size * 2, buf.getLength());
     assertTrue(new_capacity != orig_capacity);
     buf.setSize(size*4);
     assertTrue(new_capacity != buf.getCapacity());
     for(int i=0; i < size*2; ++i) {
-      assertEquals(hadoop[i%size], buf.get()[i]);
+      assertEquals(hadoop[i%size], buf.getBytes()[i]);
     }
     // shrink the buffer
     buf.setCapacity(1);
     // make sure the size has been cut down too
-    assertEquals(1, buf.getSize());
+    assertEquals(1, buf.getLength());
     // but that the data is still there
-    assertEquals(hadoop[0], buf.get()[0]);
+    assertEquals(hadoop[0], buf.getBytes()[0]);
   }
   
   public void testHash() throws Exception {

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java Mon Sep 15 16:55:55 2008
@@ -83,11 +83,11 @@
       int keyLength = minKeySize + 
         (keySizeRange != 0 ? random.nextInt(keySizeRange) : 0);
       randomKey.setSize(keyLength);
-      randomizeBytes(randomKey.get(), 0, randomKey.getSize());
+      randomizeBytes(randomKey.getBytes(), 0, randomKey.getLength());
       int valueLength = minValueSize +
         (valueSizeRange != 0 ? random.nextInt(valueSizeRange) : 0);
       randomValue.setSize(valueLength);
-      randomizeBytes(randomValue.get(), 0, randomValue.getSize());
+      randomizeBytes(randomValue.getBytes(), 0, randomValue.getLength());
       writer.append(randomKey, randomValue);
       numBytesToWrite -= keyLength + valueLength;
     }

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/SortValidator.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/SortValidator.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/SortValidator.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/SortValidator.java Mon Sep 15 16:55:55 2008
@@ -72,9 +72,9 @@
   }
   
   static private byte[] pair(BytesWritable a, BytesWritable b) {
-    byte[] pairData = new byte[a.getSize()+ b.getSize()];
-    System.arraycopy(a.get(), 0, pairData, 0, a.getSize());
-    System.arraycopy(b.get(), 0, pairData, a.getSize(), b.getSize());
+    byte[] pairData = new byte[a.getLength()+ b.getLength()];
+    System.arraycopy(a.getBytes(), 0, pairData, 0, a.getLength());
+    System.arraycopy(b.getBytes(), 0, pairData, a.getLength(), b.getLength());
     return pairData;
   }
 
@@ -125,10 +125,10 @@
      */
     static class RawBytesWritable extends Raw  {
       public byte[] getRawBytes(Writable bw) {
-        return ((BytesWritable)bw).get();
+        return ((BytesWritable)bw).getBytes();
       }
       public int getRawBytesLength(Writable bw) {
-        return ((BytesWritable)bw).getSize(); 
+        return ((BytesWritable)bw).getLength(); 
       }
     }
     

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileAsBinaryInputFormat.java Mon Sep 15 16:55:55 2008
@@ -77,9 +77,9 @@
         while (reader.next(bkey, bval)) {
           tkey.set(Integer.toString(r.nextInt(), 36));
           tval.set(Long.toString(r.nextLong(), 36));
-          buf.reset(bkey.get(), bkey.getSize());
+          buf.reset(bkey.getBytes(), bkey.getLength());
           cmpkey.readFields(buf);
-          buf.reset(bval.get(), bval.getSize());
+          buf.reset(bval.getBytes(), bval.getLength());
           cmpval.readFields(buf);
           assertTrue(
               "Keys don't match: " + "*" + cmpkey.toString() + ":" +

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java?rev=695690&r1=695689&r2=695690&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/ThreadedMapBenchmark.java Mon Sep 15 16:55:55 2008
@@ -154,13 +154,13 @@
                          ? random.nextInt(keySizeRange) 
                          : 0);
       randomKey.setSize(keyLength);
-      randomizeBytes(randomKey.get(), 0, randomKey.getSize());
+      randomizeBytes(randomKey.getBytes(), 0, randomKey.getLength());
       int valueLength = minValueSize 
                         + (valueSizeRange != 0 
                            ? random.nextInt(valueSizeRange) 
                            : 0);
       randomValue.setSize(valueLength);
-      randomizeBytes(randomValue.get(), 0, randomValue.getSize());
+      randomizeBytes(randomValue.getBytes(), 0, randomValue.getLength());
       output.collect(randomKey, randomValue);
       numBytesToWrite -= keyLength + valueLength;
       reporter.incrCounter(Counters.BYTES_WRITTEN, 1);