Posted to common-commits@hadoop.apache.org by su...@apache.org on 2012/08/25 03:03:34 UTC
svn commit: r1377168 [3/7] - in /hadoop/common/trunk/hadoop-common-project:
hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/
hadoop-common/src/main/java/org/apache/hadoop/conf/
hadoop-common/src/main/java/org/apache/hadoop/fs/ h...
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java Sat Aug 25 01:03:22 2012
@@ -42,10 +42,12 @@ public class DoubleWritable implements W
set(value);
}
+ @Override
public void readFields(DataInput in) throws IOException {
value = in.readDouble();
}
+ @Override
public void write(DataOutput out) throws IOException {
out.writeDouble(value);
}
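
The change above is the pattern repeated throughout this commit: Writable's readFields/write implementations gain @Override so the compiler verifies the signature actually matches the interface (legal on interface methods since Java 6). A minimal sketch of the same pattern for a hypothetical custom type (PointWritable is an illustration, not part of this commit):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.Writable;

    // Hypothetical example type; not part of this commit.
    public class PointWritable implements Writable {
      private double x, y;

      @Override                      // compiler now rejects signature drift,
      public void readFields(DataInput in) throws IOException {
        x = in.readDouble();         // e.g. readFields(DataInputStream) would
        y = in.readDouble();         // silently overload instead of override
      }

      @Override
      public void write(DataOutput out) throws IOException {
        out.writeDouble(x);
        out.writeDouble(y);
      }
    }
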
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java Sat Aug 25 01:03:22 2012
@@ -23,7 +23,6 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.EnumSet;
import java.util.Iterator;
-import java.util.Collection;
import java.util.AbstractCollection;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -46,8 +45,11 @@ public class EnumSetWritable<E extends E
EnumSetWritable() {
}
+ @Override
public Iterator<E> iterator() { return value.iterator(); }
+ @Override
public int size() { return value.size(); }
+ @Override
public boolean add(E e) {
if (value == null) {
value = EnumSet.of(e);
@@ -109,7 +111,7 @@ public class EnumSetWritable<E extends E
return value;
}
- /** {@inheritDoc} */
+ @Override
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
int length = in.readInt();
@@ -127,7 +129,7 @@ public class EnumSetWritable<E extends E
}
}
- /** {@inheritDoc} */
+ @Override
public void write(DataOutput out) throws IOException {
if (this.value == null) {
out.writeInt(-1);
@@ -152,6 +154,7 @@ public class EnumSetWritable<E extends E
* Returns true if <code>o</code> is an EnumSetWritable with the same value,
* or both are null.
*/
+ @Override
public boolean equals(Object o) {
if (o == null) {
throw new IllegalArgumentException("null argument passed in equal().");
@@ -180,27 +183,25 @@ public class EnumSetWritable<E extends E
return elementType;
}
- /** {@inheritDoc} */
+ @Override
public int hashCode() {
if (value == null)
return 0;
return (int) value.hashCode();
}
- /** {@inheritDoc} */
+ @Override
public String toString() {
if (value == null)
return "(null)";
return value.toString();
}
- /** {@inheritDoc} */
@Override
public Configuration getConf() {
return this.conf;
}
- /** {@inheritDoc} */
@Override
public void setConf(Configuration conf) {
this.conf = conf;
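
The hunks above also drop the /** {@inheritDoc} */ comments. Javadoc already copies the parent's documentation onto an undocumented overriding method, so a comment containing only {@inheritDoc} adds nothing, while @Override adds a compile-time check. A tiny illustration (Shape and Circle are hypothetical, just to show the doc inheritance):

    // Hypothetical classes to illustrate doc inheritance; not from this commit.
    abstract class Shape {
      /** Returns the area of this shape in square units. */
      abstract double area();
    }

    class Circle extends Shape {
      private final double r;
      Circle(double r) { this.r = r; }

      @Override                 // no Javadoc needed: javadoc inherits the text
      double area() {           // of Shape#area automatically, and @Override
        return Math.PI * r * r; // makes the compiler confirm this overrides it
      }
    }
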
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java Sat Aug 25 01:03:22 2012
@@ -39,10 +39,12 @@ public class FloatWritable implements Wr
/** Return the value of this FloatWritable. */
public float get() { return value; }
+ @Override
public void readFields(DataInput in) throws IOException {
value = in.readFloat();
}
+ @Override
public void write(DataOutput out) throws IOException {
out.writeFloat(value);
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java Sat Aug 25 01:03:22 2012
@@ -114,11 +114,13 @@ public abstract class GenericWritable im
return instance;
}
+ @Override
public String toString() {
return "GW[" + (instance != null ? ("class=" + instance.getClass().getName() +
",value=" + instance.toString()) : "(null)") + "]";
}
+ @Override
public void readFields(DataInput in) throws IOException {
type = in.readByte();
Class<? extends Writable> clazz = getTypes()[type & 0xff];
@@ -131,6 +133,7 @@ public abstract class GenericWritable im
instance.readFields(in);
}
+ @Override
public void write(DataOutput out) throws IOException {
if (type == NOT_SET || instance == null)
throw new IOException("The GenericWritable has NOT been set correctly. type="
@@ -145,10 +148,12 @@ public abstract class GenericWritable im
*/
abstract protected Class<? extends Writable>[] getTypes();
+ @Override
public Configuration getConf() {
return conf;
}
+ @Override
public void setConf(Configuration conf) {
this.conf = conf;
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java Sat Aug 25 01:03:22 2012
@@ -272,9 +272,11 @@ public class IOUtils {
* The /dev/null of OutputStreams.
*/
public static class NullOutputStream extends OutputStream {
+ @Override
public void write(byte[] b, int off, int len) throws IOException {
}
+ @Override
public void write(int b) throws IOException {
}
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java Sat Aug 25 01:03:22 2012
@@ -42,10 +42,12 @@ public class IntWritable implements Writ
/** Return the value of this IntWritable. */
public int get() { return value; }
+ @Override
public void readFields(DataInput in) throws IOException {
value = in.readInt();
}
+ @Override
public void write(DataOutput out) throws IOException {
out.writeInt(value);
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java Sat Aug 25 01:03:22 2012
@@ -42,15 +42,18 @@ public class LongWritable implements Wri
/** Return the value of this LongWritable. */
public long get() { return value; }
+ @Override
public void readFields(DataInput in) throws IOException {
value = in.readLong();
}
+ @Override
public void write(DataOutput out) throws IOException {
out.writeLong(value);
}
/** Returns true iff <code>o</code> is a LongWritable with the same value. */
+ @Override
public boolean equals(Object o) {
if (!(o instanceof LongWritable))
return false;
@@ -58,17 +61,20 @@ public class LongWritable implements Wri
return this.value == other.value;
}
+ @Override
public int hashCode() {
return (int)value;
}
/** Compares two LongWritables. */
+ @Override
public int compareTo(LongWritable o) {
long thisValue = this.value;
long thatValue = o.value;
return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
}
+ @Override
public String toString() {
return Long.toString(value);
}
@@ -79,6 +85,7 @@ public class LongWritable implements Wri
super(LongWritable.class);
}
+ @Override
public int compare(byte[] b1, int s1, int l1,
byte[] b2, int s2, int l2) {
long thisValue = readLong(b1, s1);
@@ -94,6 +101,7 @@ public class LongWritable implements Wri
public int compare(WritableComparable a, WritableComparable b) {
return -super.compare(a, b);
}
+ @Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
return -super.compare(b1, s1, l1, b2, s2, l2);
}
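
The compare(byte[], ...) override annotated above is LongWritable's raw comparator: it sorts serialized records without instantiating LongWritable objects. A self-contained sketch of using the comparator that LongWritable registers in its static initializer:

    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.WritableComparator;

    public class RawCompareSketch {
      public static void main(String[] args) throws Exception {
        DataOutputBuffer a = new DataOutputBuffer();
        new LongWritable(3L).write(a);            // 8-byte big-endian encoding
        DataOutputBuffer b = new DataOutputBuffer();
        new LongWritable(7L).write(b);

        // Compares the serialized bytes directly; no deserialization happens.
        WritableComparator cmp = WritableComparator.get(LongWritable.class);
        int c = cmp.compare(a.getData(), 0, a.getLength(),
                            b.getData(), 0, b.getLength());
        System.out.println(c < 0);                // true: 3 < 7
      }
    }
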
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java Sat Aug 25 01:03:22 2012
@@ -36,6 +36,7 @@ public class MD5Hash implements Writable
public static final int MD5_LEN = 16;
private static ThreadLocal<MessageDigest> DIGESTER_FACTORY = new ThreadLocal<MessageDigest>() {
+ @Override
protected MessageDigest initialValue() {
try {
return MessageDigest.getInstance("MD5");
@@ -65,6 +66,7 @@ public class MD5Hash implements Writable
}
// javadoc from Writable
+ @Override
public void readFields(DataInput in) throws IOException {
in.readFully(digest);
}
@@ -77,6 +79,7 @@ public class MD5Hash implements Writable
}
// javadoc from Writable
+ @Override
public void write(DataOutput out) throws IOException {
out.write(digest);
}
@@ -155,6 +158,7 @@ public class MD5Hash implements Writable
/** Returns true iff <code>o</code> is an MD5Hash whose digest contains the
* same values. */
+ @Override
public boolean equals(Object o) {
if (!(o instanceof MD5Hash))
return false;
@@ -165,12 +169,14 @@ public class MD5Hash implements Writable
/** Returns a hash code value for this object.
* Only uses the first 4 bytes, since md5s are evenly distributed.
*/
+ @Override
public int hashCode() {
return quarterDigest();
}
/** Compares this object with the specified object for order.*/
+ @Override
public int compareTo(MD5Hash that) {
return WritableComparator.compareBytes(this.digest, 0, MD5_LEN,
that.digest, 0, MD5_LEN);
@@ -182,6 +188,7 @@ public class MD5Hash implements Writable
super(MD5Hash.class);
}
+ @Override
public int compare(byte[] b1, int s1, int l1,
byte[] b2, int s2, int l2) {
return compareBytes(b1, s1, MD5_LEN, b2, s2, MD5_LEN);
@@ -196,6 +203,7 @@ public class MD5Hash implements Writable
{'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
/** Returns a string representation of this object. */
+ @Override
public String toString() {
StringBuilder buf = new StringBuilder(MD5_LEN*2);
for (int i = 0; i < MD5_LEN; i++) {
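
MD5Hash's DIGESTER_FACTORY, whose initialValue gains @Override above, exists because MessageDigest instances are stateful and not thread-safe; a ThreadLocal avoids both synchronization and a getInstance() call per digest. A standalone sketch of the same pattern:

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class DigestPerThread {
      // One stateful digester per thread; initialValue runs lazily on first get().
      private static final ThreadLocal<MessageDigest> DIGESTER =
          new ThreadLocal<MessageDigest>() {
            @Override
            protected MessageDigest initialValue() {
              try {
                return MessageDigest.getInstance("MD5");
              } catch (NoSuchAlgorithmException e) {
                throw new RuntimeException(e); // MD5 is a mandatory JCE algorithm
              }
            }
          };

      public static byte[] md5(byte[] data) {
        MessageDigest d = DIGESTER.get();
        d.reset();                             // clear state from earlier calls
        return d.digest(data);
      }
    }
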
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java Sat Aug 25 01:03:22 2012
@@ -296,6 +296,7 @@ public class MapFile {
}
/** Close the map. */
+ @Override
public synchronized void close() throws IOException {
data.close();
index.close();
@@ -723,6 +724,7 @@ public class MapFile {
}
/** Close the map. */
+ @Override
public synchronized void close() throws IOException {
if (!indexClosed) {
index.close();
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java Sat Aug 25 01:03:22 2012
@@ -55,27 +55,27 @@ public class MapWritable extends Abstrac
copy(other);
}
- /** {@inheritDoc} */
+ @Override
public void clear() {
instance.clear();
}
- /** {@inheritDoc} */
+ @Override
public boolean containsKey(Object key) {
return instance.containsKey(key);
}
- /** {@inheritDoc} */
+ @Override
public boolean containsValue(Object value) {
return instance.containsValue(value);
}
- /** {@inheritDoc} */
+ @Override
public Set<Map.Entry<Writable, Writable>> entrySet() {
return instance.entrySet();
}
- /** {@inheritDoc} */
+ @Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
@@ -93,27 +93,27 @@ public class MapWritable extends Abstrac
return false;
}
- /** {@inheritDoc} */
+ @Override
public Writable get(Object key) {
return instance.get(key);
}
- /** {@inheritDoc} */
+ @Override
public int hashCode() {
return 1 + this.instance.hashCode();
}
- /** {@inheritDoc} */
+ @Override
public boolean isEmpty() {
return instance.isEmpty();
}
- /** {@inheritDoc} */
+ @Override
public Set<Writable> keySet() {
return instance.keySet();
}
- /** {@inheritDoc} */
+ @Override
@SuppressWarnings("unchecked")
public Writable put(Writable key, Writable value) {
addToMap(key.getClass());
@@ -121,31 +121,30 @@ public class MapWritable extends Abstrac
return instance.put(key, value);
}
- /** {@inheritDoc} */
+ @Override
public void putAll(Map<? extends Writable, ? extends Writable> t) {
for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
put(e.getKey(), e.getValue());
}
}
- /** {@inheritDoc} */
+ @Override
public Writable remove(Object key) {
return instance.remove(key);
}
- /** {@inheritDoc} */
+ @Override
public int size() {
return instance.size();
}
- /** {@inheritDoc} */
+ @Override
public Collection<Writable> values() {
return instance.values();
}
// Writable
- /** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
@@ -164,7 +163,6 @@ public class MapWritable extends Abstrac
}
}
- /** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
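
MapWritable's write/readFields pair (whose {@inheritDoc} comments are dropped above) makes a heterogeneous map of Writable keys and values serializable as a single unit. A round-trip sketch using Hadoop's in-memory buffers:

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.Text;

    public class MapWritableRoundTrip {
      public static void main(String[] args) throws Exception {
        MapWritable map = new MapWritable();
        map.put(new Text("answer"), new IntWritable(42));

        DataOutputBuffer out = new DataOutputBuffer();
        map.write(out);                        // serializes class ids + entries

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        MapWritable copy = new MapWritable();
        copy.readFields(in);                   // rebuilds the same entries

        System.out.println(copy.get(new Text("answer")));  // prints 42
      }
    }
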
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java Sat Aug 25 01:03:22 2012
@@ -35,6 +35,7 @@ public class NullWritable implements Wri
/** Returns the single instance of this class. */
public static NullWritable get() { return THIS; }
+ @Override
public String toString() {
return "(null)";
}
@@ -46,8 +47,11 @@ public class NullWritable implements Wri
public int compareTo(NullWritable other) {
return 0;
}
+ @Override
public boolean equals(Object other) { return other instanceof NullWritable; }
+ @Override
public void readFields(DataInput in) throws IOException {}
+ @Override
public void write(DataOutput out) throws IOException {}
/** A Comparator "optimized" for NullWritable. */
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java Sat Aug 25 01:03:22 2012
@@ -66,15 +66,18 @@ public class ObjectWritable implements W
this.instance = instance;
}
+ @Override
public String toString() {
return "OW[class=" + declaredClass + ",value=" + instance + "]";
}
+ @Override
public void readFields(DataInput in) throws IOException {
readObject(in, this, this.conf);
}
+ @Override
public void write(DataOutput out) throws IOException {
writeObject(out, instance, declaredClass, conf);
}
@@ -99,6 +102,7 @@ public class ObjectWritable implements W
super(conf);
this.declaredClass = declaredClass;
}
+ @Override
public void readFields(DataInput in) throws IOException {
String className = UTF8.readString(in);
declaredClass = PRIMITIVE_NAMES.get(className);
@@ -110,6 +114,7 @@ public class ObjectWritable implements W
}
}
}
+ @Override
public void write(DataOutput out) throws IOException {
UTF8.writeString(out, declaredClass.getName());
}
@@ -375,10 +380,12 @@ public class ObjectWritable implements W
return declaredClass;
}
+ @Override
public void setConf(Configuration conf) {
this.conf = conf;
}
+ @Override
public Configuration getConf() {
return this.conf;
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java Sat Aug 25 01:03:22 2012
@@ -50,6 +50,7 @@ public class OutputBuffer extends Filter
private static class Buffer extends ByteArrayOutputStream {
public byte[] getData() { return buf; }
public int getLength() { return count; }
+ @Override
public void reset() { count = 0; }
public void write(InputStream in, int len) throws IOException {
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java Sat Aug 25 01:03:22 2012
@@ -194,6 +194,7 @@ public class ReadaheadPool {
this.len = len;
}
+ @Override
public void run() {
if (canceled) return;
// There's a very narrow race here that the file will close right at
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java Sat Aug 25 01:03:22 2012
@@ -24,7 +24,6 @@ import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java Sat Aug 25 01:03:22 2012
@@ -625,15 +625,18 @@ public class SequenceFile {
dataSize = length;
}
+ @Override
public int getSize() {
return dataSize;
}
+ @Override
public void writeUncompressedBytes(DataOutputStream outStream)
throws IOException {
outStream.write(data, 0, dataSize);
}
+ @Override
public void writeCompressedBytes(DataOutputStream outStream)
throws IllegalArgumentException, IOException {
throw
@@ -666,10 +669,12 @@ public class SequenceFile {
dataSize = length;
}
+ @Override
public int getSize() {
return dataSize;
}
+ @Override
public void writeUncompressedBytes(DataOutputStream outStream)
throws IOException {
if (decompressedStream == null) {
@@ -687,6 +692,7 @@ public class SequenceFile {
}
}
+ @Override
public void writeCompressedBytes(DataOutputStream outStream)
throws IllegalArgumentException, IOException {
outStream.write(data, 0, dataSize);
@@ -728,6 +734,7 @@ public class SequenceFile {
return new TreeMap<Text, Text>(this.theMetadata);
}
+ @Override
public void write(DataOutput out) throws IOException {
out.writeInt(this.theMetadata.size());
Iterator<Map.Entry<Text, Text>> iter =
@@ -739,6 +746,7 @@ public class SequenceFile {
}
}
+ @Override
public void readFields(DataInput in) throws IOException {
int sz = in.readInt();
if (sz < 0) throw new IOException("Invalid size: " + sz + " for file metadata object");
@@ -752,6 +760,7 @@ public class SequenceFile {
}
}
+ @Override
public boolean equals(Object other) {
if (other == null) {
return false;
@@ -788,11 +797,13 @@ public class SequenceFile {
return true;
}
+ @Override
public int hashCode() {
assert false : "hashCode not designed";
return 42; // any arbitrary constant will do
}
+ @Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("size: ").append(this.theMetadata.size()).append("\n");
@@ -1250,6 +1261,7 @@ public class SequenceFile {
Configuration getConf() { return conf; }
/** Close the file. */
+ @Override
public synchronized void close() throws IOException {
keySerializer.close();
uncompressedValSerializer.close();
@@ -1360,6 +1372,7 @@ public class SequenceFile {
}
/** Append a key/value pair. */
+ @Override
@SuppressWarnings("unchecked")
public synchronized void append(Object key, Object val)
throws IOException {
@@ -1392,6 +1405,7 @@ public class SequenceFile {
}
/** Append a key/value pair. */
+ @Override
public synchronized void appendRaw(byte[] keyData, int keyOffset,
int keyLength, ValueBytes val) throws IOException {
@@ -1449,6 +1463,7 @@ public class SequenceFile {
}
/** Compress and flush contents to dfs */
+ @Override
public synchronized void sync() throws IOException {
if (noBufferedRecords > 0) {
super.sync();
@@ -1478,6 +1493,7 @@ public class SequenceFile {
}
/** Close the file. */
+ @Override
public synchronized void close() throws IOException {
if (out != null) {
sync();
@@ -1486,6 +1502,7 @@ public class SequenceFile {
}
/** Append a key/value pair. */
+ @Override
@SuppressWarnings("unchecked")
public synchronized void append(Object key, Object val)
throws IOException {
@@ -1518,6 +1535,7 @@ public class SequenceFile {
}
/** Append a key/value pair. */
+ @Override
public synchronized void appendRaw(byte[] keyData, int keyOffset,
int keyLength, ValueBytes val) throws IOException {
@@ -1960,6 +1978,7 @@ public class SequenceFile {
}
/** Close the file. */
+ @Override
public synchronized void close() throws IOException {
// Return the decompressors to the pool
CodecPool.returnDecompressor(keyLenDecompressor);
@@ -2618,6 +2637,7 @@ public class SequenceFile {
}
/** Returns the name of the file. */
+ @Override
public String toString() {
return filename;
}
@@ -2948,6 +2968,7 @@ public class SequenceFile {
mergeSort.mergeSort(pointersCopy, pointers, 0, count);
}
class SeqFileComparator implements Comparator<IntWritable> {
+ @Override
public int compare(IntWritable I, IntWritable J) {
return comparator.compare(rawBuffer, keyOffsets[I.get()],
keyLengths[I.get()], rawBuffer,
@@ -3221,6 +3242,7 @@ public class SequenceFile {
this.tmpDir = tmpDir;
this.progress = progress;
}
+ @Override
protected boolean lessThan(Object a, Object b) {
// indicate we're making progress
if (progress != null) {
@@ -3232,6 +3254,7 @@ public class SequenceFile {
msa.getKey().getLength(), msb.getKey().getData(), 0,
msb.getKey().getLength()) < 0;
}
+ @Override
public void close() throws IOException {
SegmentDescriptor ms; // close inputs
while ((ms = (SegmentDescriptor)pop()) != null) {
@@ -3239,12 +3262,15 @@ public class SequenceFile {
}
minSegment = null;
}
+ @Override
public DataOutputBuffer getKey() throws IOException {
return rawKey;
}
+ @Override
public ValueBytes getValue() throws IOException {
return rawValue;
}
+ @Override
public boolean next() throws IOException {
if (size() == 0)
return false;
@@ -3272,6 +3298,7 @@ public class SequenceFile {
return true;
}
+ @Override
public Progress getProgress() {
return mergeProgress;
}
@@ -3469,6 +3496,7 @@ public class SequenceFile {
return preserveInput;
}
+ @Override
public int compareTo(Object o) {
SegmentDescriptor that = (SegmentDescriptor)o;
if (this.segmentLength != that.segmentLength) {
@@ -3481,6 +3509,7 @@ public class SequenceFile {
compareTo(that.segmentPathName.toString());
}
+ @Override
public boolean equals(Object o) {
if (!(o instanceof SegmentDescriptor)) {
return false;
@@ -3495,6 +3524,7 @@ public class SequenceFile {
return false;
}
+ @Override
public int hashCode() {
return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32));
}
@@ -3584,12 +3614,14 @@ public class SequenceFile {
/** The default cleanup. Subclasses can override this with a custom
* cleanup
*/
+ @Override
public void cleanup() throws IOException {
super.close();
if (super.shouldPreserveInput()) return;
parentContainer.cleanup();
}
+ @Override
public boolean equals(Object o) {
if (!(o instanceof LinkedSegmentsDescriptor)) {
return false;
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java Sat Aug 25 01:03:22 2012
@@ -87,6 +87,7 @@ public class SetFile extends MapFile {
}
// javadoc inherited
+ @Override
public boolean seek(WritableComparable key)
throws IOException {
return super.seek(key);
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java Sat Aug 25 01:03:22 2012
@@ -57,86 +57,86 @@ public class SortedMapWritable extends A
copy(other);
}
- /** {@inheritDoc} */
+ @Override
public Comparator<? super WritableComparable> comparator() {
// Returning null means we use the natural ordering of the keys
return null;
}
- /** {@inheritDoc} */
+ @Override
public WritableComparable firstKey() {
return instance.firstKey();
}
- /** {@inheritDoc} */
+ @Override
public SortedMap<WritableComparable, Writable>
headMap(WritableComparable toKey) {
return instance.headMap(toKey);
}
- /** {@inheritDoc} */
+ @Override
public WritableComparable lastKey() {
return instance.lastKey();
}
- /** {@inheritDoc} */
+ @Override
public SortedMap<WritableComparable, Writable>
subMap(WritableComparable fromKey, WritableComparable toKey) {
return instance.subMap(fromKey, toKey);
}
- /** {@inheritDoc} */
+ @Override
public SortedMap<WritableComparable, Writable>
tailMap(WritableComparable fromKey) {
return instance.tailMap(fromKey);
}
- /** {@inheritDoc} */
+ @Override
public void clear() {
instance.clear();
}
- /** {@inheritDoc} */
+ @Override
public boolean containsKey(Object key) {
return instance.containsKey(key);
}
- /** {@inheritDoc} */
+ @Override
public boolean containsValue(Object value) {
return instance.containsValue(value);
}
- /** {@inheritDoc} */
+ @Override
public Set<java.util.Map.Entry<WritableComparable, Writable>> entrySet() {
return instance.entrySet();
}
- /** {@inheritDoc} */
+ @Override
public Writable get(Object key) {
return instance.get(key);
}
- /** {@inheritDoc} */
+ @Override
public boolean isEmpty() {
return instance.isEmpty();
}
- /** {@inheritDoc} */
+ @Override
public Set<WritableComparable> keySet() {
return instance.keySet();
}
- /** {@inheritDoc} */
+ @Override
public Writable put(WritableComparable key, Writable value) {
addToMap(key.getClass());
addToMap(value.getClass());
return instance.put(key, value);
}
- /** {@inheritDoc} */
+ @Override
public void putAll(Map<? extends WritableComparable, ? extends Writable> t) {
for (Map.Entry<? extends WritableComparable, ? extends Writable> e:
t.entrySet()) {
@@ -145,22 +145,21 @@ public class SortedMapWritable extends A
}
}
- /** {@inheritDoc} */
+ @Override
public Writable remove(Object key) {
return instance.remove(key);
}
- /** {@inheritDoc} */
+ @Override
public int size() {
return instance.size();
}
- /** {@inheritDoc} */
+ @Override
public Collection<Writable> values() {
return instance.values();
}
- /** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
@@ -187,7 +186,6 @@ public class SortedMapWritable extends A
}
}
- /** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java Sat Aug 25 01:03:22 2012
@@ -54,6 +54,7 @@ public interface Stringifier<T> extends
* Closes this object.
* @throws IOException if an I/O error occurs
* */
+ @Override
public void close() throws IOException;
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java Sat Aug 25 01:03:22 2012
@@ -55,6 +55,7 @@ public class Text extends BinaryComparab
private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY =
new ThreadLocal<CharsetEncoder>() {
+ @Override
protected CharsetEncoder initialValue() {
return Charset.forName("UTF-8").newEncoder().
onMalformedInput(CodingErrorAction.REPORT).
@@ -64,6 +65,7 @@ public class Text extends BinaryComparab
private static ThreadLocal<CharsetDecoder> DECODER_FACTORY =
new ThreadLocal<CharsetDecoder>() {
+ @Override
protected CharsetDecoder initialValue() {
return Charset.forName("UTF-8").newDecoder().
onMalformedInput(CodingErrorAction.REPORT).
@@ -112,11 +114,13 @@ public class Text extends BinaryComparab
* valid. Please use {@link #copyBytes()} if you
* need the returned array to be precisely the length of the data.
*/
+ @Override
public byte[] getBytes() {
return bytes;
}
/** Returns the number of bytes in the byte array */
+ @Override
public int getLength() {
return length;
}
@@ -281,6 +285,7 @@ public class Text extends BinaryComparab
/** deserialize
*/
+ @Override
public void readFields(DataInput in) throws IOException {
int newLength = WritableUtils.readVInt(in);
setCapacity(newLength, false);
@@ -313,6 +318,7 @@ public class Text extends BinaryComparab
* length uses zero-compressed encoding
* @see Writable#write(DataOutput)
*/
+ @Override
public void write(DataOutput out) throws IOException {
WritableUtils.writeVInt(out, length);
out.write(bytes, 0, length);
@@ -329,6 +335,7 @@ public class Text extends BinaryComparab
}
/** Returns true iff <code>o</code> is a Text with the same contents. */
+ @Override
public boolean equals(Object o) {
if (o instanceof Text)
return super.equals(o);
@@ -346,6 +353,7 @@ public class Text extends BinaryComparab
super(Text.class);
}
+ @Override
public int compare(byte[] b1, int s1, int l1,
byte[] b2, int s2, int l2) {
int n1 = WritableUtils.decodeVIntSize(b1[s1]);
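
Text's per-thread ENCODER_FACTORY/DECODER_FACTORY, annotated above, are configured with CodingErrorAction.REPORT so malformed input raises CharacterCodingException instead of being silently replaced with U+FFFD. A sketch of the decoding half of that pattern:

    import java.nio.ByteBuffer;
    import java.nio.charset.CharacterCodingException;
    import java.nio.charset.Charset;
    import java.nio.charset.CharsetDecoder;
    import java.nio.charset.CodingErrorAction;

    public class StrictUtf8 {
      private static final ThreadLocal<CharsetDecoder> DECODER =
          new ThreadLocal<CharsetDecoder>() {
            @Override
            protected CharsetDecoder initialValue() {
              return Charset.forName("UTF-8").newDecoder()
                  .onMalformedInput(CodingErrorAction.REPORT)
                  .onUnmappableCharacter(CodingErrorAction.REPORT);
            }
          };

      /** Throws CharacterCodingException on invalid UTF-8 instead of mangling it. */
      public static String decode(byte[] bytes) throws CharacterCodingException {
        return DECODER.get().decode(ByteBuffer.wrap(bytes)).toString();
      }
    }
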
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java Sat Aug 25 01:03:22 2012
@@ -57,6 +57,7 @@ public class TwoDArrayWritable implement
public Writable[][] get() { return values; }
+ @Override
public void readFields(DataInput in) throws IOException {
// construct matrix
values = new Writable[in.readInt()][];
@@ -81,6 +82,7 @@ public class TwoDArrayWritable implement
}
}
+ @Override
public void write(DataOutput out) throws IOException {
out.writeInt(values.length); // write values
for (int i = 0; i < values.length; i++) {
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java Sat Aug 25 01:03:22 2012
@@ -110,6 +110,7 @@ public class UTF8 implements WritableCom
System.arraycopy(other.bytes, 0, bytes, 0, length);
}
+ @Override
public void readFields(DataInput in) throws IOException {
length = in.readUnsignedShort();
if (bytes == null || bytes.length < length)
@@ -123,6 +124,7 @@ public class UTF8 implements WritableCom
WritableUtils.skipFully(in, length);
}
+ @Override
public void write(DataOutput out) throws IOException {
out.writeShort(length);
out.write(bytes, 0, length);
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java Sat Aug 25 01:03:22 2012
@@ -43,10 +43,12 @@ public class VIntWritable implements Wri
/** Return the value of this VIntWritable. */
public int get() { return value; }
+ @Override
public void readFields(DataInput in) throws IOException {
value = WritableUtils.readVInt(in);
}
+ @Override
public void write(DataOutput out) throws IOException {
WritableUtils.writeVInt(out, value);
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java Sat Aug 25 01:03:22 2012
@@ -43,10 +43,12 @@ public class VLongWritable implements Wr
/** Return the value of this LongWritable. */
public long get() { return value; }
+ @Override
public void readFields(DataInput in) throws IOException {
value = WritableUtils.readVLong(in);
}
+ @Override
public void write(DataOutput out) throws IOException {
WritableUtils.writeVLong(out, value);
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java Sat Aug 25 01:03:22 2012
@@ -39,6 +39,7 @@ public class VersionMismatchException ex
}
/** Returns a string representation of this object. */
+ @Override
public String toString(){
return "A record version mismatch occured. Expecting v"
+ expectedVersion + ", found v" + foundVersion;
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java Sat Aug 25 01:03:22 2012
@@ -40,11 +40,13 @@ public abstract class VersionedWritable
public abstract byte getVersion();
// javadoc from Writable
+ @Override
public void write(DataOutput out) throws IOException {
out.writeByte(getVersion()); // store version
}
// javadoc from Writable
+ @Override
public void readFields(DataInput in) throws IOException {
byte version = in.readByte(); // read version
if (version != getVersion())
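
VersionedWritable writes a version byte first and rejects mismatches on read, throwing the VersionMismatchException shown earlier; subclasses chain to super before their own fields. A hypothetical subclass sketch (CountRecord is an illustration, not part of this commit):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.VersionedWritable;

    // Hypothetical record type; not part of this commit.
    public class CountRecord extends VersionedWritable {
      private static final byte VERSION = 1;
      private int count;

      @Override
      public byte getVersion() { return VERSION; }

      @Override
      public void write(DataOutput out) throws IOException {
        super.write(out);       // writes the version byte first
        out.writeInt(count);
      }

      @Override
      public void readFields(DataInput in) throws IOException {
        super.readFields(in);   // reads the byte; throws
        count = in.readInt();   // VersionMismatchException if it isn't VERSION
      }
    }
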
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java Sat Aug 25 01:03:22 2012
@@ -120,6 +120,7 @@ public class WritableComparator implemen
* Writable#readFields(DataInput)}, then calls {@link
* #compare(WritableComparable,WritableComparable)}.
*/
+ @Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
try {
buffer.reset(b1, s1, l1); // parse key1
@@ -144,6 +145,7 @@ public class WritableComparator implemen
return a.compareTo(b);
}
+ @Override
public int compare(Object a, Object b) {
return compare((WritableComparable)a, (WritableComparable)b);
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java Sat Aug 25 01:03:22 2012
@@ -63,6 +63,7 @@ public class BZip2Codec implements Split
* @throws java.io.IOException
* Throws IO exception
*/
+ @Override
public CompressionOutputStream createOutputStream(OutputStream out)
throws IOException {
return new BZip2CompressionOutputStream(out);
@@ -74,6 +75,7 @@ public class BZip2Codec implements Split
* @return CompressionOutputStream
@throws java.io.IOException
*/
+ @Override
public CompressionOutputStream createOutputStream(OutputStream out,
Compressor compressor) throws IOException {
return createOutputStream(out);
@@ -84,6 +86,7 @@ public class BZip2Codec implements Split
*
* @return BZip2DummyCompressor.class
*/
+ @Override
public Class<? extends org.apache.hadoop.io.compress.Compressor> getCompressorType() {
return BZip2DummyCompressor.class;
}
@@ -93,6 +96,7 @@ public class BZip2Codec implements Split
*
* @return Compressor
*/
+ @Override
public Compressor createCompressor() {
return new BZip2DummyCompressor();
}
@@ -106,6 +110,7 @@ public class BZip2Codec implements Split
* @throws java.io.IOException
* Throws IOException
*/
+ @Override
public CompressionInputStream createInputStream(InputStream in)
throws IOException {
return new BZip2CompressionInputStream(in);
@@ -116,6 +121,7 @@ public class BZip2Codec implements Split
*
* @return CompressionInputStream
*/
+ @Override
public CompressionInputStream createInputStream(InputStream in,
Decompressor decompressor) throws IOException {
return createInputStream(in);
@@ -133,6 +139,7 @@ public class BZip2Codec implements Split
*
* @return CompressionInputStream for BZip2 aligned at block boundaries
*/
+ @Override
public SplitCompressionInputStream createInputStream(InputStream seekableIn,
Decompressor decompressor, long start, long end, READ_MODE readMode)
throws IOException {
@@ -181,6 +188,7 @@ public class BZip2Codec implements Split
*
* @return BZip2DummyDecompressor.class
*/
+ @Override
public Class<? extends org.apache.hadoop.io.compress.Decompressor> getDecompressorType() {
return BZip2DummyDecompressor.class;
}
@@ -190,6 +198,7 @@ public class BZip2Codec implements Split
*
* @return Decompressor
*/
+ @Override
public Decompressor createDecompressor() {
return new BZip2DummyDecompressor();
}
@@ -199,6 +208,7 @@ public class BZip2Codec implements Split
*
* @return A String telling the default bzip2 file extension
*/
+ @Override
public String getDefaultExtension() {
return ".bz2";
}
@@ -226,6 +236,7 @@ public class BZip2Codec implements Split
}
}
+ @Override
public void finish() throws IOException {
if (needsReset) {
// In the case that nothing is written to this stream, we still need to
@@ -245,12 +256,14 @@ public class BZip2Codec implements Split
}
}
+ @Override
public void resetState() throws IOException {
// Cannot write to out at this point because out might not be ready
// yet, as in SequenceFile.Writer implementation.
needsReset = true;
}
+ @Override
public void write(int b) throws IOException {
if (needsReset) {
internalReset();
@@ -258,6 +271,7 @@ public class BZip2Codec implements Split
this.output.write(b);
}
+ @Override
public void write(byte[] b, int off, int len) throws IOException {
if (needsReset) {
internalReset();
@@ -265,6 +279,7 @@ public class BZip2Codec implements Split
this.output.write(b, off, len);
}
+ @Override
public void close() throws IOException {
if (needsReset) {
// In the case that nothing is written to this stream, we still need to
@@ -382,6 +397,7 @@ public class BZip2Codec implements Split
}// end of method
+ @Override
public void close() throws IOException {
if (!needsReset) {
input.close();
@@ -417,6 +433,7 @@ public class BZip2Codec implements Split
*
*/
+ @Override
public int read(byte[] b, int off, int len) throws IOException {
if (needsReset) {
internalReset();
@@ -440,6 +457,7 @@ public class BZip2Codec implements Split
}
+ @Override
public int read() throws IOException {
byte b[] = new byte[1];
int result = this.read(b, 0, 1);
@@ -454,6 +472,7 @@ public class BZip2Codec implements Split
}
}
+ @Override
public void resetState() throws IOException {
// Cannot read from bufferedIn at this point because bufferedIn
// might not be ready
@@ -461,6 +480,7 @@ public class BZip2Codec implements Split
needsReset = true;
}
+ @Override
public long getPos() {
return this.compressedStreamPosition;
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java Sat Aug 25 01:03:22 2012
@@ -78,6 +78,7 @@ public class BlockCompressorStream exten
* Each block contains the uncompressed length for the block, followed by
* one or more length-prefixed blocks of compressed data.
*/
+ @Override
public void write(byte[] b, int off, int len) throws IOException {
// Sanity checks
if (compressor.finished()) {
@@ -132,6 +133,7 @@ public class BlockCompressorStream exten
}
}
+ @Override
public void finish() throws IOException {
if (!compressor.finished()) {
rawWriteInt((int)compressor.getBytesRead());
@@ -142,6 +144,7 @@ public class BlockCompressorStream exten
}
}
+ @Override
protected void compress() throws IOException {
int len = compressor.compress(buffer, 0, buffer.length);
if (len > 0) {
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java Sat Aug 25 01:03:22 2012
@@ -65,6 +65,7 @@ public class BlockDecompressorStream ext
super(in);
}
+ @Override
protected int decompress(byte[] b, int off, int len) throws IOException {
// Check if we are the beginning of a block
if (noUncompressedBytes == originalBlockSize) {
@@ -104,6 +105,7 @@ public class BlockDecompressorStream ext
return n;
}
+ @Override
protected int getCompressedData() throws IOException {
checkStream();
@@ -126,6 +128,7 @@ public class BlockDecompressorStream ext
return len;
}
+ @Override
public void resetState() throws IOException {
originalBlockSize = 0;
noUncompressedBytes = 0;
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java Sat Aug 25 01:03:22 2012
@@ -75,6 +75,7 @@ public class CompressionCodecFactory {
/**
* Print the extension map out as a string.
*/
+ @Override
public String toString() {
StringBuilder buf = new StringBuilder();
Iterator<Map.Entry<String, CompressionCodec>> itr =
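
CompressionCodecFactory maps file extensions (such as the ".bz2" returned by BZip2Codec.getDefaultExtension above) to codec instances. A sketch of the typical read path; the input path is an assumption, not from this commit:

    import java.io.InputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class CodecRead {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path(args[0]);         // e.g. /data/part-00000.bz2
        FileSystem fs = path.getFileSystem(conf);

        // Picks a codec from the file extension, or null if none matches.
        CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
        InputStream in = (codec == null)
            ? fs.open(path)
            : codec.createInputStream(fs.open(path));
        IOUtils.copyBytes(in, System.out, 4096, true); // decompressed contents
      }
    }
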
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java Sat Aug 25 01:03:22 2012
@@ -55,6 +55,7 @@ public abstract class CompressionInputSt
this.in = in;
}
+ @Override
public void close() throws IOException {
in.close();
}
@@ -63,6 +64,7 @@ public abstract class CompressionInputSt
* Read bytes from the stream.
* Made abstract to prevent leakage to underlying stream.
*/
+ @Override
public abstract int read(byte[] b, int off, int len) throws IOException;
/**
@@ -76,6 +78,7 @@ public abstract class CompressionInputSt
*
* @return Current position in stream as a long
*/
+ @Override
public long getPos() throws IOException {
if (!(in instanceof Seekable) || !(in instanceof PositionedReadable)){
//This way of getting the current position will not work for file
@@ -95,6 +98,7 @@ public abstract class CompressionInputSt
* @throws UnsupportedOperationException
*/
+ @Override
public void seek(long pos) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
@@ -104,6 +108,7 @@ public abstract class CompressionInputSt
*
* @throws UnsupportedOperationException
*/
+ @Override
public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java Sat Aug 25 01:03:22 2012
@@ -44,11 +44,13 @@ public abstract class CompressionOutputS
this.out = out;
}
+ @Override
public void close() throws IOException {
finish();
out.close();
}
+ @Override
public void flush() throws IOException {
out.flush();
}
@@ -57,6 +59,7 @@ public abstract class CompressionOutputS
* Write compressed bytes to the stream.
* Made abstract to prevent leakage to underlying stream.
*/
+ @Override
public abstract void write(byte[] b, int off, int len) throws IOException;
/**
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java Sat Aug 25 01:03:22 2012
@@ -59,6 +59,7 @@ public class CompressorStream extends Co
super(out);
}
+ @Override
public void write(byte[] b, int off, int len) throws IOException {
// Sanity checks
if (compressor.finished()) {
@@ -83,6 +84,7 @@ public class CompressorStream extends Co
}
}
+ @Override
public void finish() throws IOException {
if (!compressor.finished()) {
compressor.finish();
@@ -92,10 +94,12 @@ public class CompressorStream extends Co
}
}
+ @Override
public void resetState() throws IOException {
compressor.reset();
}
+ @Override
public void close() throws IOException {
if (!closed) {
finish();
@@ -105,6 +109,7 @@ public class CompressorStream extends Co
}
private byte[] oneByte = new byte[1];
+ @Override
public void write(int b) throws IOException {
oneByte[0] = (byte)(b & 0xff);
write(oneByte, 0, oneByte.length);
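
CompressorStream funnels single-byte writes through a reusable one-element
buffer to avoid a per-call allocation. A stripped-down sketch of that adapter
pattern (the class is hypothetical; the real bulk write compresses):

    import java.io.IOException;
    import java.io.OutputStream;

    class OneByteAdapter extends OutputStream {
      private final byte[] oneByte = new byte[1];

      @Override
      public void write(int b) throws IOException {
        oneByte[0] = (byte) (b & 0xff);
        write(oneByte, 0, 1);  // funnel into the bulk write
      }

      @Override
      public void write(byte[] b, int off, int len) throws IOException {
        // CompressorStream compresses here; this sketch just discards bytes
      }
    }
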
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java Sat Aug 25 01:03:22 2012
@@ -66,11 +66,13 @@ public class DecompressorStream extends
}
private byte[] oneByte = new byte[1];
+ @Override
public int read() throws IOException {
checkStream();
return (read(oneByte, 0, oneByte.length) == -1) ? -1 : (oneByte[0] & 0xff);
}
+ @Override
public int read(byte[] b, int off, int len) throws IOException {
checkStream();
@@ -163,11 +165,13 @@ public class DecompressorStream extends
}
}
+ @Override
public void resetState() throws IOException {
decompressor.reset();
}
private byte[] skipBytes = new byte[512];
+ @Override
public long skip(long n) throws IOException {
// Sanity checks
if (n < 0) {
@@ -189,11 +193,13 @@ public class DecompressorStream extends
return skipped;
}
+ @Override
public int available() throws IOException {
checkStream();
return (eof) ? 0 : 1;
}
+ @Override
public void close() throws IOException {
if (!closed) {
in.close();
@@ -201,13 +207,16 @@ public class DecompressorStream extends
}
}
+ @Override
public boolean markSupported() {
return false;
}
+ @Override
public synchronized void mark(int readlimit) {
}
+ @Override
public synchronized void reset() throws IOException {
throw new IOException("mark/reset not supported");
}
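
DecompressorStream reports markSupported() as false and makes reset() fail,
so callers should probe before relying on mark/reset. A small defensive
sketch (class and method names are invented):

    import java.io.IOException;
    import java.io.InputStream;

    final class MarkCheck {
      static void rewindIfPossible(InputStream in) throws IOException {
        if (in.markSupported()) {
          in.mark(1024);
          // ... read ahead ...
          in.reset();
        }
        // else: DecompressorStream.reset() would throw
        // IOException("mark/reset not supported")
      }
    }
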
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java Sat Aug 25 01:03:22 2012
@@ -37,14 +37,17 @@ public class DefaultCodec implements Con
Configuration conf;
+ @Override
public void setConf(Configuration conf) {
this.conf = conf;
}
+ @Override
public Configuration getConf() {
return conf;
}
+ @Override
public CompressionOutputStream createOutputStream(OutputStream out)
throws IOException {
// This may leak memory if called in a loop. The createCompressor() call
@@ -57,6 +60,7 @@ public class DefaultCodec implements Con
conf.getInt("io.file.buffer.size", 4*1024));
}
+ @Override
public CompressionOutputStream createOutputStream(OutputStream out,
Compressor compressor)
throws IOException {
@@ -64,20 +68,24 @@ public class DefaultCodec implements Con
conf.getInt("io.file.buffer.size", 4*1024));
}
+ @Override
public Class<? extends Compressor> getCompressorType() {
return ZlibFactory.getZlibCompressorType(conf);
}
+ @Override
public Compressor createCompressor() {
return ZlibFactory.getZlibCompressor(conf);
}
+ @Override
public CompressionInputStream createInputStream(InputStream in)
throws IOException {
return new DecompressorStream(in, createDecompressor(),
conf.getInt("io.file.buffer.size", 4*1024));
}
+ @Override
public CompressionInputStream createInputStream(InputStream in,
Decompressor decompressor)
throws IOException {
@@ -85,14 +93,17 @@ public class DefaultCodec implements Con
conf.getInt("io.file.buffer.size", 4*1024));
}
+ @Override
public Class<? extends Decompressor> getDecompressorType() {
return ZlibFactory.getZlibDecompressorType(conf);
}
+ @Override
public Decompressor createDecompressor() {
return ZlibFactory.getZlibDecompressor(conf);
}
+ @Override
public String getDefaultExtension() {
return ".deflate";
}
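
The comment in createOutputStream() above warns that allocating a fresh
Compressor per stream can leak direct-buffer memory; the recommended fix is
reuse. A minimal sketch of that reuse via the existing CodecPool utility
(the wrapper class and method are invented; the Hadoop APIs are real):

    import java.io.IOException;
    import java.io.OutputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CodecPool;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.Compressor;
    import org.apache.hadoop.io.compress.DefaultCodec;

    public class ReuseCompressor {
      static void writeCompressed(OutputStream raw, byte[] data) throws IOException {
        DefaultCodec codec = new DefaultCodec();
        codec.setConf(new Configuration());
        Compressor compressor = CodecPool.getCompressor(codec);  // pooled
        try {
          CompressionOutputStream out = codec.createOutputStream(raw, compressor);
          out.write(data, 0, data.length);
          out.finish();
        } finally {
          CodecPool.returnCompressor(compressor);  // return for reuse
        }
      }
    }
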
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java Sat Aug 25 01:03:22 2012
@@ -20,15 +20,11 @@ package org.apache.hadoop.io.compress;
import java.io.*;
import java.util.zip.GZIPOutputStream;
-import java.util.zip.GZIPInputStream;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.zlib.*;
-import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
-import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
/**
* This class creates gzip compressors/decompressors.
@@ -66,32 +62,39 @@ public class GzipCodec extends DefaultCo
super(out);
}
+ @Override
public void close() throws IOException {
out.close();
}
+ @Override
public void flush() throws IOException {
out.flush();
}
+ @Override
public void write(int b) throws IOException {
out.write(b);
}
+ @Override
public void write(byte[] data, int offset, int length)
throws IOException {
out.write(data, offset, length);
}
+ @Override
public void finish() throws IOException {
((ResetableGZIPOutputStream) out).finish();
}
+ @Override
public void resetState() throws IOException {
((ResetableGZIPOutputStream) out).resetState();
}
}
+ @Override
public CompressionOutputStream createOutputStream(OutputStream out)
throws IOException {
return (ZlibFactory.isNativeZlibLoaded(conf)) ?
@@ -100,6 +103,7 @@ public class GzipCodec extends DefaultCo
new GzipOutputStream(out);
}
+ @Override
public CompressionOutputStream createOutputStream(OutputStream out,
Compressor compressor)
throws IOException {
@@ -110,23 +114,27 @@ public class GzipCodec extends DefaultCo
createOutputStream(out);
}
+ @Override
public Compressor createCompressor() {
return (ZlibFactory.isNativeZlibLoaded(conf))
? new GzipZlibCompressor(conf)
: null;
}
+ @Override
public Class<? extends Compressor> getCompressorType() {
return ZlibFactory.isNativeZlibLoaded(conf)
? GzipZlibCompressor.class
: null;
}
+ @Override
public CompressionInputStream createInputStream(InputStream in)
throws IOException {
return createInputStream(in, null);
}
+ @Override
public CompressionInputStream createInputStream(InputStream in,
Decompressor decompressor)
throws IOException {
@@ -137,18 +145,21 @@ public class GzipCodec extends DefaultCo
conf.getInt("io.file.buffer.size", 4*1024));
}
+ @Override
public Decompressor createDecompressor() {
return (ZlibFactory.isNativeZlibLoaded(conf))
? new GzipZlibDecompressor()
: new BuiltInGzipDecompressor();
}
+ @Override
public Class<? extends Decompressor> getDecompressorType() {
return ZlibFactory.isNativeZlibLoaded(conf)
? GzipZlibDecompressor.class
: BuiltInGzipDecompressor.class;
}
+ @Override
public String getDefaultExtension() {
return ".gz";
}
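
As the hunks above show, GzipCodec selects the native zlib-backed classes
when the native library is loaded and falls back to the pure-Java
GZIPOutputStream/BuiltInGzipDecompressor otherwise. The same check can be
used for diagnostics (sketch; the class name is invented):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.zlib.ZlibFactory;

    public class NativeZlibCheck {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        if (ZlibFactory.isNativeZlibLoaded(conf)) {
          System.out.println("using GzipZlibCompressor / GzipZlibDecompressor");
        } else {
          System.out.println("using GZIPOutputStream / BuiltInGzipDecompressor");
        }
      }
    }
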
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java Sat Aug 25 01:03:22 2012
@@ -338,6 +338,7 @@ public class CBZip2InputStream extends I
}
+ @Override
public int read() throws IOException {
if (this.in != null) {
@@ -372,6 +373,7 @@ public class CBZip2InputStream extends I
*/
+ @Override
public int read(final byte[] dest, final int offs, final int len)
throws IOException {
if (offs < 0) {
@@ -574,6 +576,7 @@ public class CBZip2InputStream extends I
}
}
+ @Override
public void close() throws IOException {
InputStream inShadow = this.in;
if (inShadow != null) {
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java Sat Aug 25 01:03:22 2012
@@ -639,6 +639,7 @@ public class CBZip2OutputStream extends
init();
}
+ @Override
public void write(final int b) throws IOException {
if (this.out != null) {
write0(b);
@@ -704,6 +705,7 @@ public class CBZip2OutputStream extends
/**
* Overridden to close the stream.
*/
+ @Override
protected void finalize() throws Throwable {
finish();
super.finalize();
@@ -726,6 +728,7 @@ public class CBZip2OutputStream extends
}
}
+ @Override
public void close() throws IOException {
if (out != null) {
OutputStream outShadow = this.out;
@@ -739,6 +742,7 @@ public class CBZip2OutputStream extends
}
}
+ @Override
public void flush() throws IOException {
OutputStream outShadow = this.out;
if (outShadow != null) {
@@ -849,6 +853,7 @@ public class CBZip2OutputStream extends
return this.blockSize100k;
}
+ @Override
public void write(final byte[] buf, int offs, final int len)
throws IOException {
if (offs < 0) {
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java Sat Aug 25 01:03:22 2012
@@ -258,6 +258,7 @@ public class Lz4Decompressor implements
return 0;
}
+ @Override
public synchronized void reset() {
finished = false;
compressedDirectBufLen = 0;
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java Sat Aug 25 01:03:22 2012
@@ -257,6 +257,7 @@ public class SnappyDecompressor implemen
return 0;
}
+ @Override
public synchronized void reset() {
finished = false;
compressedDirectBufLen = 0;
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java Sat Aug 25 01:03:22 2012
@@ -122,7 +122,7 @@ public class BuiltInGzipDecompressor imp
// in the first buffer load? (But how else would one do it?)
}
- /** {@inheritDoc} */
+ @Override
public synchronized boolean needsInput() {
if (state == GzipStateLabel.DEFLATE_STREAM) { // most common case
return inflater.needsInput();
@@ -144,6 +144,7 @@ public class BuiltInGzipDecompressor imp
* the bulk deflate stream, which is a performance hit we don't want
* to absorb. (Decompressor now documents this requirement.)
*/
+ @Override
public synchronized void setInput(byte[] b, int off, int len) {
if (b == null) {
throw new NullPointerException();
@@ -175,6 +176,7 @@ public class BuiltInGzipDecompressor imp
* methods below), the deflate stream is never copied; Inflater operates
* directly on the user's buffer.
*/
+ @Override
public synchronized int decompress(byte[] b, int off, int len)
throws IOException {
int numAvailBytes = 0;
@@ -421,16 +423,17 @@ public class BuiltInGzipDecompressor imp
*
* @return the total (non-negative) number of unprocessed bytes in input
*/
+ @Override
public synchronized int getRemaining() {
return userBufLen;
}
- /** {@inheritDoc} */
+ @Override
public synchronized boolean needsDictionary() {
return inflater.needsDictionary();
}
- /** {@inheritDoc} */
+ @Override
public synchronized void setDictionary(byte[] b, int off, int len) {
inflater.setDictionary(b, off, len);
}
@@ -439,6 +442,7 @@ public class BuiltInGzipDecompressor imp
* Returns true if the end of the gzip substream (single "member") has been
* reached.</p>
*/
+ @Override
public synchronized boolean finished() {
return (state == GzipStateLabel.FINISHED);
}
@@ -447,6 +451,7 @@ public class BuiltInGzipDecompressor imp
* Resets everything, including the input buffer, regardless of whether the
* current gzip substream is finished.</p>
*/
+ @Override
public synchronized void reset() {
// could optionally emit INFO message if state != GzipStateLabel.FINISHED
inflater.reset();
@@ -463,7 +468,7 @@ public class BuiltInGzipDecompressor imp
hasHeaderCRC = false;
}
- /** {@inheritDoc} */
+ @Override
public synchronized void end() {
inflater.end();
}
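
The methods above implement the Decompressor contract: supply input with
setInput(), drain decompress() until finished() reports the end of the gzip
member, and refill when needsInput() is true. A hedged sketch of that
calling convention for input supplied in a single chunk (class and method
names are invented):

    import java.io.IOException;
    import org.apache.hadoop.io.compress.Decompressor;

    final class DecompressLoop {
      static int drain(Decompressor d, byte[] compressed, byte[] outBuf)
          throws IOException {
        int total = 0;
        d.setInput(compressed, 0, compressed.length);
        while (!d.finished()) {
          int n = d.decompress(outBuf, 0, outBuf.length);
          if (n == 0 && d.needsInput()) {
            break;  // a real caller would feed more compressed bytes here
          }
          total += n;  // and consume outBuf[0..n) here
        }
        return total;
      }
    }
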
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java Sat Aug 25 01:03:22 2012
@@ -48,6 +48,7 @@ public class BuiltInZlibDeflater extends
super();
}
+ @Override
public synchronized int compress(byte[] b, int off, int len)
throws IOException {
return super.deflate(b, off, len);
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java Sat Aug 25 01:03:22 2012
@@ -39,6 +39,7 @@ public class BuiltInZlibInflater extends
super();
}
+ @Override
public synchronized int decompress(byte[] b, int off, int len)
throws IOException {
try {
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java Sat Aug 25 01:03:22 2012
@@ -259,6 +259,7 @@ public class ZlibCompressor implements C
}
}
+ @Override
public synchronized void setInput(byte[] b, int off, int len) {
if (b== null) {
throw new NullPointerException();
@@ -287,6 +288,7 @@ public class ZlibCompressor implements C
uncompressedDirectBufLen = uncompressedDirectBuf.position();
}
+ @Override
public synchronized void setDictionary(byte[] b, int off, int len) {
if (stream == 0 || b == null) {
throw new NullPointerException();
@@ -297,6 +299,7 @@ public class ZlibCompressor implements C
setDictionary(stream, b, off, len);
}
+ @Override
public synchronized boolean needsInput() {
// Consume remaining compressed data?
if (compressedDirectBuf.remaining() > 0) {
@@ -325,16 +328,19 @@ public class ZlibCompressor implements C
return false;
}
+ @Override
public synchronized void finish() {
finish = true;
}
+ @Override
public synchronized boolean finished() {
// Check if 'zlib' says it's 'finished' and
// all compressed data has been consumed
return (finished && compressedDirectBuf.remaining() == 0);
}
+ @Override
public synchronized int compress(byte[] b, int off, int len)
throws IOException {
if (b == null) {
@@ -385,6 +391,7 @@ public class ZlibCompressor implements C
*
* @return the total (non-negative) number of compressed bytes output so far
*/
+ @Override
public synchronized long getBytesWritten() {
checkStream();
return getBytesWritten(stream);
@@ -395,11 +402,13 @@ public class ZlibCompressor implements C
*
* @return the total (non-negative) number of uncompressed bytes input so far
*/
+ @Override
public synchronized long getBytesRead() {
checkStream();
return getBytesRead(stream);
}
+ @Override
public synchronized void reset() {
checkStream();
reset(stream);
@@ -413,6 +422,7 @@ public class ZlibCompressor implements C
userBufOff = userBufLen = 0;
}
+ @Override
public synchronized void end() {
if (stream != 0) {
end(stream);
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java Sat Aug 25 01:03:22 2012
@@ -118,6 +118,7 @@ public class ZlibDecompressor implements
this(CompressionHeader.DEFAULT_HEADER, DEFAULT_DIRECT_BUFFER_SIZE);
}
+ @Override
public synchronized void setInput(byte[] b, int off, int len) {
if (b == null) {
throw new NullPointerException();
@@ -154,6 +155,7 @@ public class ZlibDecompressor implements
userBufLen -= compressedDirectBufLen;
}
+ @Override
public synchronized void setDictionary(byte[] b, int off, int len) {
if (stream == 0 || b == null) {
throw new NullPointerException();
@@ -165,6 +167,7 @@ public class ZlibDecompressor implements
needDict = false;
}
+ @Override
public synchronized boolean needsInput() {
// Consume remaining compressed data?
if (uncompressedDirectBuf.remaining() > 0) {
@@ -184,16 +187,19 @@ public class ZlibDecompressor implements
return false;
}
+ @Override
public synchronized boolean needsDictionary() {
return needDict;
}
+ @Override
public synchronized boolean finished() {
// Check if 'zlib' says it's 'finished' and
// all compressed data has been consumed
return (finished && uncompressedDirectBuf.remaining() == 0);
}
+ @Override
public synchronized int decompress(byte[] b, int off, int len)
throws IOException {
if (b == null) {
@@ -255,6 +261,7 @@ public class ZlibDecompressor implements
*
* @return the total (non-negative) number of unprocessed bytes in input
*/
+ @Override
public synchronized int getRemaining() {
checkStream();
return userBufLen + getRemaining(stream); // userBuf + compressedDirectBuf
@@ -263,6 +270,7 @@ public class ZlibDecompressor implements
/**
* Resets everything including the input buffers (user and direct).</p>
*/
+ @Override
public synchronized void reset() {
checkStream();
reset(stream);
@@ -274,6 +282,7 @@ public class ZlibDecompressor implements
userBufOff = userBufLen = 0;
}
+ @Override
public synchronized void end() {
if (stream != 0) {
end(stream);
@@ -281,6 +290,7 @@ public class ZlibDecompressor implements
}
}
+ @Override
protected void finalize() {
end();
}
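
finalize() here is only a safety net for the native z_stream; deterministic
release remains the caller's responsibility. A sketch of the try/finally
idiom (class and method names are invented):

    import org.apache.hadoop.io.compress.Decompressor;

    final class SafeEnd {
      static void use(Decompressor d) {
        try {
          // ... setInput()/decompress() loop as in the earlier sketch ...
        } finally {
          d.end();  // free the native z_stream now, not at GC time
        }
      }
    }
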
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java Sat Aug 25 01:03:22 2012
@@ -300,6 +300,7 @@ final class BCFile {
* Close the BCFile Writer. Attempting to use the Writer after calling
* <code>close</code> is not allowed and may lead to undetermined results.
*/
+ @Override
public void close() throws IOException {
if (closed == true) {
return;
@@ -447,6 +448,7 @@ final class BCFile {
this.compressAlgo = compressAlgo;
}
+ @Override
public void register(long raw, long begin, long end) {
metaIndex.addEntry(new MetaIndexEntry(name, compressAlgo,
new BlockRegion(begin, end - begin, raw)));
@@ -463,6 +465,7 @@ final class BCFile {
// do nothing
}
+ @Override
public void register(long raw, long begin, long end) {
dataIndex.addBlockRegion(new BlockRegion(begin, end - begin, raw));
}
@@ -671,6 +674,7 @@ final class BCFile {
/**
* Finishing reading the BCFile. Release all resources.
*/
+ @Override
public void close() {
// nothing to be done now
}
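
BCFile's Writer.close() guards itself with a closed flag so that a second
close() is a no-op. The same idempotent-close idiom in isolation (the class
name is invented):

    import java.io.Closeable;
    import java.io.IOException;

    final class IdempotentClose implements Closeable {
      private boolean closed = false;

      @Override
      public synchronized void close() throws IOException {
        if (closed) {
          return;  // a second close() is a harmless no-op
        }
        closed = true;
        // ... release resources exactly once ...
      }
    }
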
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java?rev=1377168&r1=1377167&r2=1377168&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java Sat Aug 25 01:03:22 2012
@@ -68,6 +68,7 @@ class CompareUtils {
magnitude = m;
}
+ @Override
public long magnitude() {
return magnitude;
}