Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/10/19 04:27:38 UTC
svn commit: r1399950 [13/17] - in
/hadoop/common/branches/HDFS-2802/hadoop-common-project: hadoop-annotations/
hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/
hadoop-auth-examples/ hadoop-auth/ hadoop-auth/src/main/java/org/apa...
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java Fri Oct 19 02:25:55 2012
@@ -1,444 +1,460 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.util;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.zip.Checksum;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.ChecksumException;
-
-/**
- * This class provides an interface and utilities for processing checksums for
- * DFS data transfers.
- */
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-@InterfaceStability.Evolving
-public class DataChecksum implements Checksum {
-
- // Misc constants
- public static final int HEADER_LEN = 5; /// 1 byte type and 4 byte len
-
- // checksum types
- public static final int CHECKSUM_NULL = 0;
- public static final int CHECKSUM_CRC32 = 1;
- public static final int CHECKSUM_CRC32C = 2;
-
- private static String[] NAMES = new String[] {
- "NULL", "CRC32", "CRC32C"
- };
-
- private static final int CHECKSUM_NULL_SIZE = 0;
- private static final int CHECKSUM_CRC32_SIZE = 4;
- private static final int CHECKSUM_CRC32C_SIZE = 4;
-
-
- public static DataChecksum newDataChecksum( int type, int bytesPerChecksum ) {
- if ( bytesPerChecksum <= 0 ) {
- return null;
- }
-
- switch ( type ) {
- case CHECKSUM_NULL :
- return new DataChecksum( CHECKSUM_NULL, new ChecksumNull(),
- CHECKSUM_NULL_SIZE, bytesPerChecksum );
- case CHECKSUM_CRC32 :
- return new DataChecksum( CHECKSUM_CRC32, new PureJavaCrc32(),
- CHECKSUM_CRC32_SIZE, bytesPerChecksum );
- case CHECKSUM_CRC32C:
- return new DataChecksum( CHECKSUM_CRC32C, new PureJavaCrc32C(),
- CHECKSUM_CRC32C_SIZE, bytesPerChecksum);
- default:
- return null;
- }
- }
-
- /**
- * Creates a DataChecksum from HEADER_LEN bytes from bytes[offset].
- * @return DataChecksum of the type in the array or null in case of an error.
- */
- public static DataChecksum newDataChecksum( byte bytes[], int offset ) {
- if ( offset < 0 || bytes.length < offset + HEADER_LEN ) {
- return null;
- }
-
- // like readInt():
- int bytesPerChecksum = ( (bytes[offset+1] & 0xff) << 24 ) |
- ( (bytes[offset+2] & 0xff) << 16 ) |
- ( (bytes[offset+3] & 0xff) << 8 ) |
- ( (bytes[offset+4] & 0xff) );
- return newDataChecksum( bytes[offset], bytesPerChecksum );
- }
-
- /**
- * This constructs a DataChecksum by reading HEADER_LEN bytes from the
- * input stream <i>in</i>.
- */
- public static DataChecksum newDataChecksum( DataInputStream in )
- throws IOException {
- int type = in.readByte();
- int bpc = in.readInt();
- DataChecksum summer = newDataChecksum( type, bpc );
- if ( summer == null ) {
- throw new IOException( "Could not create DataChecksum of type " +
- type + " with bytesPerChecksum " + bpc );
- }
- return summer;
- }
-
- /**
- * Writes the checksum header to the output stream <i>out</i>.
- */
- public void writeHeader( DataOutputStream out )
- throws IOException {
- out.writeByte( type );
- out.writeInt( bytesPerChecksum );
- }
-
- public byte[] getHeader() {
- byte[] header = new byte[DataChecksum.HEADER_LEN];
- header[0] = (byte) (type & 0xff);
- // Write to the buffer just like DataOutput.writeInt()
- header[1+0] = (byte) ((bytesPerChecksum >>> 24) & 0xff);
- header[1+1] = (byte) ((bytesPerChecksum >>> 16) & 0xff);
- header[1+2] = (byte) ((bytesPerChecksum >>> 8) & 0xff);
- header[1+3] = (byte) (bytesPerChecksum & 0xff);
- return header;
- }
-
- /**
- * Writes the current checksum to the stream.
- * If <i>reset</i> is true, then resets the checksum.
- * @return number of bytes written. Will be equal to getChecksumSize();
- */
- public int writeValue( DataOutputStream out, boolean reset )
- throws IOException {
- if ( size <= 0 ) {
- return 0;
- }
-
- if ( size == 4 ) {
- out.writeInt( (int) summer.getValue() );
- } else {
- throw new IOException( "Unknown Checksum " + type );
- }
-
- if ( reset ) {
- reset();
- }
-
- return size;
- }
-
- /**
- * Writes the current checksum to a buffer.
- * If <i>reset</i> is true, then resets the checksum.
- * @return number of bytes written. Will be equal to getChecksumSize();
- */
- public int writeValue( byte[] buf, int offset, boolean reset )
- throws IOException {
- if ( size <= 0 ) {
- return 0;
- }
-
- if ( size == 4 ) {
- int checksum = (int) summer.getValue();
- buf[offset+0] = (byte) ((checksum >>> 24) & 0xff);
- buf[offset+1] = (byte) ((checksum >>> 16) & 0xff);
- buf[offset+2] = (byte) ((checksum >>> 8) & 0xff);
- buf[offset+3] = (byte) (checksum & 0xff);
- } else {
- throw new IOException( "Unknown Checksum " + type );
- }
-
- if ( reset ) {
- reset();
- }
-
- return size;
- }
-
- /**
- * Compares the checksum located at buf[offset] with the current checksum.
- * @return true if the checksum matches and false otherwise.
- */
- public boolean compare( byte buf[], int offset ) {
- if ( size == 4 ) {
- int checksum = ( (buf[offset+0] & 0xff) << 24 ) |
- ( (buf[offset+1] & 0xff) << 16 ) |
- ( (buf[offset+2] & 0xff) << 8 ) |
- ( (buf[offset+3] & 0xff) );
- return checksum == (int) summer.getValue();
- }
- return size == 0;
- }
-
- private final int type;
- private final int size;
- private final Checksum summer;
- private final int bytesPerChecksum;
- private int inSum = 0;
-
- private DataChecksum( int checksumType, Checksum checksum,
- int sumSize, int chunkSize ) {
- type = checksumType;
- summer = checksum;
- size = sumSize;
- bytesPerChecksum = chunkSize;
- }
-
- // Accessors
- public int getChecksumType() {
- return type;
- }
- public int getChecksumSize() {
- return size;
- }
- public int getBytesPerChecksum() {
- return bytesPerChecksum;
- }
- public int getNumBytesInSum() {
- return inSum;
- }
-
- public static final int SIZE_OF_INTEGER = Integer.SIZE / Byte.SIZE;
- public static int getChecksumHeaderSize() {
- return 1 + SIZE_OF_INTEGER; // type byte, bytesPerChecksum int
- }
- //Checksum Interface. Just a wrapper around member summer.
- public long getValue() {
- return summer.getValue();
- }
- public void reset() {
- summer.reset();
- inSum = 0;
- }
- public void update( byte[] b, int off, int len ) {
- if ( len > 0 ) {
- summer.update( b, off, len );
- inSum += len;
- }
- }
- public void update( int b ) {
- summer.update( b );
- inSum += 1;
- }
-
- /**
- * Verify that the given checksums match the given data.
- *
- * The 'mark' of the ByteBuffer parameters may be modified by this function,
- * but the position is maintained.
- *
- * @param data the DirectByteBuffer pointing to the data to verify.
- * @param checksums the DirectByteBuffer pointing to a series of stored
- * checksums
- * @param fileName the name of the file being read, for error-reporting
- * @param basePos the file position to which the start of 'data' corresponds
- * @throws ChecksumException if the checksums do not match
- */
- public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums,
- String fileName, long basePos)
- throws ChecksumException {
- if (size == 0) return;
-
- if (data.hasArray() && checksums.hasArray()) {
- verifyChunkedSums(
- data.array(), data.arrayOffset() + data.position(), data.remaining(),
- checksums.array(), checksums.arrayOffset() + checksums.position(),
- fileName, basePos);
- return;
- }
- if (NativeCrc32.isAvailable()) {
- NativeCrc32.verifyChunkedSums(bytesPerChecksum, type, checksums, data,
- fileName, basePos);
- return;
- }
-
- int startDataPos = data.position();
- data.mark();
- checksums.mark();
- try {
- byte[] buf = new byte[bytesPerChecksum];
- byte[] sum = new byte[size];
- while (data.remaining() > 0) {
- int n = Math.min(data.remaining(), bytesPerChecksum);
- checksums.get(sum);
- data.get(buf, 0, n);
- summer.reset();
- summer.update(buf, 0, n);
- int calculated = (int)summer.getValue();
- int stored = (sum[0] << 24 & 0xff000000) |
- (sum[1] << 16 & 0xff0000) |
- (sum[2] << 8 & 0xff00) |
- sum[3] & 0xff;
- if (calculated != stored) {
- long errPos = basePos + data.position() - startDataPos - n;
- throw new ChecksumException(
- "Checksum error: "+ fileName + " at "+ errPos +
- " exp: " + stored + " got: " + calculated, errPos);
- }
- }
- } finally {
- data.reset();
- checksums.reset();
- }
- }
-
- /**
- * Implementation of chunked verification specifically on byte arrays. This
- * is to avoid the copy when dealing with ByteBuffers that have array backing.
- */
- private void verifyChunkedSums(
- byte[] data, int dataOff, int dataLen,
- byte[] checksums, int checksumsOff, String fileName,
- long basePos) throws ChecksumException {
-
- int remaining = dataLen;
- int dataPos = 0;
- while (remaining > 0) {
- int n = Math.min(remaining, bytesPerChecksum);
-
- summer.reset();
- summer.update(data, dataOff + dataPos, n);
- dataPos += n;
- remaining -= n;
-
- int calculated = (int)summer.getValue();
- int stored = (checksums[checksumsOff] << 24 & 0xff000000) |
- (checksums[checksumsOff + 1] << 16 & 0xff0000) |
- (checksums[checksumsOff + 2] << 8 & 0xff00) |
- checksums[checksumsOff + 3] & 0xff;
- checksumsOff += 4;
- if (calculated != stored) {
- long errPos = basePos + dataPos - n;
- throw new ChecksumException(
- "Checksum error: "+ fileName + " at "+ errPos +
- " exp: " + stored + " got: " + calculated, errPos);
- }
- }
- }
-
- /**
- * Calculate checksums for the given data.
- *
- * The 'mark' of the ByteBuffer parameters may be modified by this function,
- * but the position is maintained.
- *
- * @param data the DirectByteBuffer pointing to the data to checksum.
- * @param checksums the DirectByteBuffer into which checksums will be
- * stored. Enough space must be available in this
- * buffer to put the checksums.
- */
- public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) {
- if (size == 0) return;
-
- if (data.hasArray() && checksums.hasArray()) {
- calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), data.remaining(),
- checksums.array(), checksums.arrayOffset() + checksums.position());
- return;
- }
-
- data.mark();
- checksums.mark();
- try {
- byte[] buf = new byte[bytesPerChecksum];
- while (data.remaining() > 0) {
- int n = Math.min(data.remaining(), bytesPerChecksum);
- data.get(buf, 0, n);
- summer.reset();
- summer.update(buf, 0, n);
- checksums.putInt((int)summer.getValue());
- }
- } finally {
- data.reset();
- checksums.reset();
- }
- }
-
- /**
- * Implementation of chunked calculation specifically on byte arrays. This
- * is to avoid the copy when dealing with ByteBuffers that have array backing.
- */
- private void calculateChunkedSums(
- byte[] data, int dataOffset, int dataLength,
- byte[] sums, int sumsOffset) {
-
- int remaining = dataLength;
- while (remaining > 0) {
- int n = Math.min(remaining, bytesPerChecksum);
- summer.reset();
- summer.update(data, dataOffset, n);
- dataOffset += n;
- remaining -= n;
- long calculated = summer.getValue();
- sums[sumsOffset++] = (byte) (calculated >> 24);
- sums[sumsOffset++] = (byte) (calculated >> 16);
- sums[sumsOffset++] = (byte) (calculated >> 8);
- sums[sumsOffset++] = (byte) (calculated);
- }
- }
-
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof DataChecksum)) {
- return false;
- }
- DataChecksum o = (DataChecksum)other;
- return o.bytesPerChecksum == this.bytesPerChecksum &&
- o.type == this.type;
- }
-
- @Override
- public int hashCode() {
- return (this.type + 31) * this.bytesPerChecksum;
- }
-
- @Override
- public String toString() {
- String strType;
- if (type < NAMES.length && type > 0) {
- strType = NAMES[type];
- } else {
- strType = String.valueOf(type);
- }
- return "DataChecksum(type=" + strType +
- ", chunkSize=" + bytesPerChecksum + ")";
- }
-
- /**
- * This just provides a dummy implementation of the Checksum interface.
- * It is used when there is no checksum available or required for the
- * data.
- */
- static class ChecksumNull implements Checksum {
-
- public ChecksumNull() {}
-
- //Dummy interface
- public long getValue() { return 0; }
- public void reset() {}
- public void update(byte[] b, int off, int len) {}
- public void update(int b) {}
- };
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.zip.Checksum;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.ChecksumException;
+
+/**
+ * This class provides an interface and utilities for processing checksums for
+ * DFS data transfers.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class DataChecksum implements Checksum {
+
+ // Misc constants
+ public static final int HEADER_LEN = 5; /// 1 byte type and 4 byte len
+
+ // checksum types
+ public static final int CHECKSUM_NULL = 0;
+ public static final int CHECKSUM_CRC32 = 1;
+ public static final int CHECKSUM_CRC32C = 2;
+ public static final int CHECKSUM_DEFAULT = 3;
+ public static final int CHECKSUM_MIXED = 4;
+
+ /** The checksum types */
+ public static enum Type {
+ NULL (CHECKSUM_NULL, 0),
+ CRC32 (CHECKSUM_CRC32, 4),
+ CRC32C(CHECKSUM_CRC32C, 4),
+ DEFAULT(CHECKSUM_DEFAULT, 0), // This cannot be used to create DataChecksum
+ MIXED (CHECKSUM_MIXED, 0); // This cannot be used to create DataChecksum
+
+ public final int id;
+ public final int size;
+
+ private Type(int id, int size) {
+ this.id = id;
+ this.size = size;
+ }
+
+ /** @return the type corresponding to the id. */
+ public static Type valueOf(int id) {
+ if (id < 0 || id >= values().length) {
+ throw new IllegalArgumentException("id=" + id
+ + " out of range [0, " + values().length + ")");
+ }
+ return values()[id];
+ }
+ }
+
+
+ public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum ) {
+ if ( bytesPerChecksum <= 0 ) {
+ return null;
+ }
+
+ switch ( type ) {
+ case NULL :
+ return new DataChecksum(type, new ChecksumNull(), bytesPerChecksum );
+ case CRC32 :
+ return new DataChecksum(type, new PureJavaCrc32(), bytesPerChecksum );
+ case CRC32C:
+ return new DataChecksum(type, new PureJavaCrc32C(), bytesPerChecksum);
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Creates a DataChecksum from HEADER_LEN bytes from bytes[offset].
+ * @return DataChecksum of the type in the array or null in case of an error.
+ */
+ public static DataChecksum newDataChecksum( byte bytes[], int offset ) {
+ if ( offset < 0 || bytes.length < offset + HEADER_LEN ) {
+ return null;
+ }
+
+ // like readInt():
+ int bytesPerChecksum = ( (bytes[offset+1] & 0xff) << 24 ) |
+ ( (bytes[offset+2] & 0xff) << 16 ) |
+ ( (bytes[offset+3] & 0xff) << 8 ) |
+ ( (bytes[offset+4] & 0xff) );
+ return newDataChecksum( Type.valueOf(bytes[offset]), bytesPerChecksum );
+ }
+
+ /**
+ * This constructs a DataChecksum by reading HEADER_LEN bytes from the
+ * input stream <i>in</i>.
+ */
+ public static DataChecksum newDataChecksum( DataInputStream in )
+ throws IOException {
+ int type = in.readByte();
+ int bpc = in.readInt();
+ DataChecksum summer = newDataChecksum(Type.valueOf(type), bpc );
+ if ( summer == null ) {
+ throw new IOException( "Could not create DataChecksum of type " +
+ type + " with bytesPerChecksum " + bpc );
+ }
+ return summer;
+ }
+
+ /**
+ * Writes the checksum header to the output stream <i>out</i>.
+ */
+ public void writeHeader( DataOutputStream out )
+ throws IOException {
+ out.writeByte( type.id );
+ out.writeInt( bytesPerChecksum );
+ }
+
+ public byte[] getHeader() {
+ byte[] header = new byte[DataChecksum.HEADER_LEN];
+ header[0] = (byte) (type.id & 0xff);
+ // Write to the buffer just like DataOutput.writeInt()
+ header[1+0] = (byte) ((bytesPerChecksum >>> 24) & 0xff);
+ header[1+1] = (byte) ((bytesPerChecksum >>> 16) & 0xff);
+ header[1+2] = (byte) ((bytesPerChecksum >>> 8) & 0xff);
+ header[1+3] = (byte) (bytesPerChecksum & 0xff);
+ return header;
+ }
+
+ /**
+ * Writes the current checksum to the stream.
+ * If <i>reset</i> is true, then resets the checksum.
+ * @return number of bytes written. Will be equal to getChecksumSize();
+ */
+ public int writeValue( DataOutputStream out, boolean reset )
+ throws IOException {
+ if ( type.size <= 0 ) {
+ return 0;
+ }
+
+ if ( type.size == 4 ) {
+ out.writeInt( (int) summer.getValue() );
+ } else {
+ throw new IOException( "Unknown Checksum " + type );
+ }
+
+ if ( reset ) {
+ reset();
+ }
+
+ return type.size;
+ }
+
+ /**
+ * Writes the current checksum to a buffer.
+ * If <i>reset</i> is true, then resets the checksum.
+ * @return number of bytes written. Will be equal to getChecksumSize();
+ */
+ public int writeValue( byte[] buf, int offset, boolean reset )
+ throws IOException {
+ if ( type.size <= 0 ) {
+ return 0;
+ }
+
+ if ( type.size == 4 ) {
+ int checksum = (int) summer.getValue();
+ buf[offset+0] = (byte) ((checksum >>> 24) & 0xff);
+ buf[offset+1] = (byte) ((checksum >>> 16) & 0xff);
+ buf[offset+2] = (byte) ((checksum >>> 8) & 0xff);
+ buf[offset+3] = (byte) (checksum & 0xff);
+ } else {
+ throw new IOException( "Unknown Checksum " + type );
+ }
+
+ if ( reset ) {
+ reset();
+ }
+
+ return type.size;
+ }
+
+ /**
+ * Compares the checksum located at buf[offset] with the current checksum.
+ * @return true if the checksum matches and false otherwise.
+ */
+ public boolean compare( byte buf[], int offset ) {
+ if ( type.size == 4 ) {
+ int checksum = ( (buf[offset+0] & 0xff) << 24 ) |
+ ( (buf[offset+1] & 0xff) << 16 ) |
+ ( (buf[offset+2] & 0xff) << 8 ) |
+ ( (buf[offset+3] & 0xff) );
+ return checksum == (int) summer.getValue();
+ }
+ return type.size == 0;
+ }
+
+ private final Type type;
+ private final Checksum summer;
+ private final int bytesPerChecksum;
+ private int inSum = 0;
+
+ private DataChecksum( Type type, Checksum checksum, int chunkSize ) {
+ this.type = type;
+ summer = checksum;
+ bytesPerChecksum = chunkSize;
+ }
+
+ // Accessors
+ public Type getChecksumType() {
+ return type;
+ }
+ public int getChecksumSize() {
+ return type.size;
+ }
+ public int getBytesPerChecksum() {
+ return bytesPerChecksum;
+ }
+ public int getNumBytesInSum() {
+ return inSum;
+ }
+
+ public static final int SIZE_OF_INTEGER = Integer.SIZE / Byte.SIZE;
+ public static int getChecksumHeaderSize() {
+ return 1 + SIZE_OF_INTEGER; // type byte, bytesPerChecksum int
+ }
+ //Checksum Interface. Just a wrapper around member summer.
+ @Override
+ public long getValue() {
+ return summer.getValue();
+ }
+ @Override
+ public void reset() {
+ summer.reset();
+ inSum = 0;
+ }
+ @Override
+ public void update( byte[] b, int off, int len ) {
+ if ( len > 0 ) {
+ summer.update( b, off, len );
+ inSum += len;
+ }
+ }
+ @Override
+ public void update( int b ) {
+ summer.update( b );
+ inSum += 1;
+ }
+
+ /**
+ * Verify that the given checksums match the given data.
+ *
+ * The 'mark' of the ByteBuffer parameters may be modified by this function,
+ * but the position is maintained.
+ *
+ * @param data the DirectByteBuffer pointing to the data to verify.
+ * @param checksums the DirectByteBuffer pointing to a series of stored
+ * checksums
+ * @param fileName the name of the file being read, for error-reporting
+ * @param basePos the file position to which the start of 'data' corresponds
+ * @throws ChecksumException if the checksums do not match
+ */
+ public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums,
+ String fileName, long basePos)
+ throws ChecksumException {
+ if (type.size == 0) return;
+
+ if (data.hasArray() && checksums.hasArray()) {
+ verifyChunkedSums(
+ data.array(), data.arrayOffset() + data.position(), data.remaining(),
+ checksums.array(), checksums.arrayOffset() + checksums.position(),
+ fileName, basePos);
+ return;
+ }
+ if (NativeCrc32.isAvailable()) {
+ NativeCrc32.verifyChunkedSums(bytesPerChecksum, type.id, checksums, data,
+ fileName, basePos);
+ return;
+ }
+
+ int startDataPos = data.position();
+ data.mark();
+ checksums.mark();
+ try {
+ byte[] buf = new byte[bytesPerChecksum];
+ byte[] sum = new byte[type.size];
+ while (data.remaining() > 0) {
+ int n = Math.min(data.remaining(), bytesPerChecksum);
+ checksums.get(sum);
+ data.get(buf, 0, n);
+ summer.reset();
+ summer.update(buf, 0, n);
+ int calculated = (int)summer.getValue();
+ int stored = (sum[0] << 24 & 0xff000000) |
+ (sum[1] << 16 & 0xff0000) |
+ (sum[2] << 8 & 0xff00) |
+ sum[3] & 0xff;
+ if (calculated != stored) {
+ long errPos = basePos + data.position() - startDataPos - n;
+ throw new ChecksumException(
+ "Checksum error: "+ fileName + " at "+ errPos +
+ " exp: " + stored + " got: " + calculated, errPos);
+ }
+ }
+ } finally {
+ data.reset();
+ checksums.reset();
+ }
+ }
+
+ /**
+ * Implementation of chunked verification specifically on byte arrays. This
+ * is to avoid the copy when dealing with ByteBuffers that have array backing.
+ */
+ private void verifyChunkedSums(
+ byte[] data, int dataOff, int dataLen,
+ byte[] checksums, int checksumsOff, String fileName,
+ long basePos) throws ChecksumException {
+
+ int remaining = dataLen;
+ int dataPos = 0;
+ while (remaining > 0) {
+ int n = Math.min(remaining, bytesPerChecksum);
+
+ summer.reset();
+ summer.update(data, dataOff + dataPos, n);
+ dataPos += n;
+ remaining -= n;
+
+ int calculated = (int)summer.getValue();
+ int stored = (checksums[checksumsOff] << 24 & 0xff000000) |
+ (checksums[checksumsOff + 1] << 16 & 0xff0000) |
+ (checksums[checksumsOff + 2] << 8 & 0xff00) |
+ checksums[checksumsOff + 3] & 0xff;
+ checksumsOff += 4;
+ if (calculated != stored) {
+ long errPos = basePos + dataPos - n;
+ throw new ChecksumException(
+ "Checksum error: "+ fileName + " at "+ errPos +
+ " exp: " + stored + " got: " + calculated, errPos);
+ }
+ }
+ }
+
+ /**
+ * Calculate checksums for the given data.
+ *
+ * The 'mark' of the ByteBuffer parameters may be modified by this function,
+ * but the position is maintained.
+ *
+ * @param data the DirectByteBuffer pointing to the data to checksum.
+ * @param checksums the DirectByteBuffer into which checksums will be
+ * stored. Enough space must be available in this
+ * buffer to put the checksums.
+ */
+ public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) {
+ if (type.size == 0) return;
+
+ if (data.hasArray() && checksums.hasArray()) {
+ calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), data.remaining(),
+ checksums.array(), checksums.arrayOffset() + checksums.position());
+ return;
+ }
+
+ data.mark();
+ checksums.mark();
+ try {
+ byte[] buf = new byte[bytesPerChecksum];
+ while (data.remaining() > 0) {
+ int n = Math.min(data.remaining(), bytesPerChecksum);
+ data.get(buf, 0, n);
+ summer.reset();
+ summer.update(buf, 0, n);
+ checksums.putInt((int)summer.getValue());
+ }
+ } finally {
+ data.reset();
+ checksums.reset();
+ }
+ }
+
+ /**
+ * Implementation of chunked calculation specifically on byte arrays. This
+ * is to avoid the copy when dealing with ByteBuffers that have array backing.
+ */
+ private void calculateChunkedSums(
+ byte[] data, int dataOffset, int dataLength,
+ byte[] sums, int sumsOffset) {
+
+ int remaining = dataLength;
+ while (remaining > 0) {
+ int n = Math.min(remaining, bytesPerChecksum);
+ summer.reset();
+ summer.update(data, dataOffset, n);
+ dataOffset += n;
+ remaining -= n;
+ long calculated = summer.getValue();
+ sums[sumsOffset++] = (byte) (calculated >> 24);
+ sums[sumsOffset++] = (byte) (calculated >> 16);
+ sums[sumsOffset++] = (byte) (calculated >> 8);
+ sums[sumsOffset++] = (byte) (calculated);
+ }
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof DataChecksum)) {
+ return false;
+ }
+ DataChecksum o = (DataChecksum)other;
+ return o.bytesPerChecksum == this.bytesPerChecksum &&
+ o.type == this.type;
+ }
+
+ @Override
+ public int hashCode() {
+ return (this.type.id + 31) * this.bytesPerChecksum;
+ }
+
+ @Override
+ public String toString() {
+ return "DataChecksum(type=" + type +
+ ", chunkSize=" + bytesPerChecksum + ")";
+ }
+
+ /**
+ * This just provides a dummy implementation of the Checksum interface.
+ * It is used when there is no checksum available or required for the
+ * data.
+ */
+ static class ChecksumNull implements Checksum {
+
+ public ChecksumNull() {}
+
+ //Dummy interface
+ @Override
+ public long getValue() { return 0; }
+ @Override
+ public void reset() {}
+ @Override
+ public void update(byte[] b, int off, int len) {}
+ @Override
+ public void update(int b) {}
+ };
+}
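For reference, a minimal usage sketch of the new Type-based API (not part of
this commit): it builds a CRC32C DataChecksum, round-trips the 5-byte header
(one type-id byte followed by a big-endian 4-byte bytesPerChecksum, as written
by getHeader()/writeHeader()), then computes and verifies per-chunk sums over
an array-backed ByteBuffer.

    import java.nio.ByteBuffer;
    import org.apache.hadoop.util.DataChecksum;

    public class DataChecksumExample {
      public static void main(String[] args) throws Exception {
        DataChecksum dc =
            DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);

        // Header layout per getHeader(): type.id, then bytesPerChecksum
        // written like DataOutput.writeInt().
        byte[] header = dc.getHeader();                  // HEADER_LEN = 5
        DataChecksum parsed = DataChecksum.newDataChecksum(header, 0);

        byte[] data = new byte[1536];                    // 3 x 512-byte chunks
        ByteBuffer d = ByteBuffer.wrap(data);
        ByteBuffer sums = ByteBuffer.allocate(3 * parsed.getChecksumSize());
        parsed.calculateChunkedSums(d, sums);            // 4 bytes per chunk

        // Throws ChecksumException on mismatch; the file name and position
        // are only used for error reporting.
        parsed.verifyChunkedSums(d, sums, "example.dat", 0L);
      }
    }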
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java Fri Oct 19 02:25:55 2012
@@ -78,26 +78,32 @@ public class DiskChecker {
}
/**
- * Create the directory if it doesn't exist and
+ * Create the directory if it doesn't exist and check that the dir is
+ * readable, writable and executable.
+ *
* @param dir
* @throws DiskErrorException
*/
public static void checkDir(File dir) throws DiskErrorException {
if (!mkdirsWithExistsCheck(dir))
- throw new DiskErrorException("can not create directory: "
+ throw new DiskErrorException("Can not create directory: "
+ dir.toString());
-
+
if (!dir.isDirectory())
- throw new DiskErrorException("not a directory: "
+ throw new DiskErrorException("Not a directory: "
+ dir.toString());
-
+
if (!dir.canRead())
- throw new DiskErrorException("directory is not readable: "
+ throw new DiskErrorException("Directory is not readable: "
+ dir.toString());
-
+
if (!dir.canWrite())
- throw new DiskErrorException("directory is not writable: "
+ throw new DiskErrorException("Directory is not writable: "
+ dir.toString());
+
+ if (!dir.canExecute())
+ throw new DiskErrorException("Directory is not executable: "
+ + dir.toString());
}
/**
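A small sketch of the tightened checkDir() contract (not part of this commit):
a data directory must now also be executable (searchable), so a directory
created with mode 600, for example, is rejected. The path below is purely
illustrative.

    import java.io.File;
    import org.apache.hadoop.util.DiskChecker;
    import org.apache.hadoop.util.DiskChecker.DiskErrorException;

    public class CheckDirExample {
      public static void main(String[] args) {
        File dir = new File("/tmp/example-data-dir");  // hypothetical path
        try {
          // Creates the directory if needed, then verifies it is a
          // readable, writable and executable directory.
          DiskChecker.checkDir(dir);
          System.out.println(dir + " is usable");
        } catch (DiskErrorException e) {
          System.err.println("Bad disk dir: " + e.getMessage());
        }
      }
    }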
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java Fri Oct 19 02:25:55 2012
@@ -30,7 +30,7 @@ import org.apache.hadoop.classification.
public final class ExitUtil {
private final static Log LOG = LogFactory.getLog(ExitUtil.class.getName());
private static volatile boolean systemExitDisabled = false;
- private static volatile boolean terminateCalled = false;
+ private static volatile ExitException firstExitException;
public static class ExitException extends RuntimeException {
private static final long serialVersionUID = 1L;
@@ -53,7 +53,24 @@ public final class ExitUtil {
* @return true if terminate has been called
*/
public static boolean terminateCalled() {
- return terminateCalled;
+ // Either we set this member or we actually called System#exit
+ return firstExitException != null;
+ }
+
+ /**
+ * @return the first ExitException thrown, null if none thrown yet
+ */
+ public static ExitException getFirstExitException() {
+ return firstExitException;
+ }
+
+ /**
+ * Reset the tracking of process termination. This is for use
+ * in unit tests where one test in the suite expects an exit
+ * but others do not.
+ */
+ public static void resetFirstExitException() {
+ firstExitException = null;
}
/**
@@ -65,19 +82,34 @@ public final class ExitUtil {
*/
public static void terminate(int status, String msg) throws ExitException {
LOG.info("Exiting with status " + status);
- terminateCalled = true;
if (systemExitDisabled) {
- throw new ExitException(status, msg);
+ ExitException ee = new ExitException(status, msg);
+ LOG.fatal("Terminate called", ee);
+ if (null == firstExitException) {
+ firstExitException = ee;
+ }
+ throw ee;
}
System.exit(status);
}
/**
+ * Like {@link #terminate(int, String)} but uses the given throwable to
+ * initialize the ExitException.
+ * @param status
+ * @param t throwable used to create the ExitException
+ * @throws ExitException if System.exit is disabled for test purposes
+ */
+ public static void terminate(int status, Throwable t) throws ExitException {
+ terminate(status, StringUtils.stringifyException(t));
+ }
+
+ /**
* Like {@link #terminate(int, String)} without a message.
* @param status
- * @throws ExitException
+ * @throws ExitException if System.exit is disabled for test purposes
*/
public static void terminate(int status) throws ExitException {
terminate(status, "ExitException");
}
-}
\ No newline at end of file
+}
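A test-style sketch of the new first-exit tracking (not part of this commit).
It assumes ExitUtil's existing disableSystemExit() toggle for the
systemExitDisabled flag referenced above.

    import org.apache.hadoop.util.ExitUtil;

    public class ExitUtilTestSketch {
      public static void main(String[] args) {
        ExitUtil.disableSystemExit();        // tests must not kill the JVM
        try {
          ExitUtil.terminate(1, "simulated fatal error");
        } catch (ExitUtil.ExitException expected) {
          // terminate() now records the first ExitException it threw.
        }
        if (!ExitUtil.terminateCalled()) {
          throw new AssertionError("terminate was not recorded");
        }
        System.out.println("first exit: "
            + ExitUtil.getFirstExitException().getMessage());
        ExitUtil.resetFirstExitException();  // so later tests start clean
      }
    }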
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java Fri Oct 19 02:25:55 2012
@@ -48,13 +48,12 @@ public final class HeapSort implements I
* Sort the given range of items using heap sort.
* {@inheritDoc}
*/
+ @Override
public void sort(IndexedSortable s, int p, int r) {
sort(s, p, r, null);
}
- /**
- * {@inheritDoc}
- */
+ @Override
public void sort(final IndexedSortable s, final int p, final int r,
final Progressable rep) {
final int N = r - p;
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java Fri Oct 19 02:25:55 2012
@@ -63,7 +63,7 @@ public class HostsFileReader {
// Everything from now on is a comment
break;
}
- if (!nodes[i].equals("")) {
+ if (!nodes[i].isEmpty()) {
LOG.info("Adding " + nodes[i] + " to the list of hosts from " + filename);
set.add(nodes[i]); // might need to add canonical name
}
@@ -80,13 +80,13 @@ public class HostsFileReader {
public synchronized void refresh() throws IOException {
LOG.info("Refreshing hosts (include/exclude) list");
- if (!includesFile.equals("")) {
+ if (!includesFile.isEmpty()) {
Set<String> newIncludes = new HashSet<String>();
readFileToSet(includesFile, newIncludes);
// switch the new hosts that are to be included
includes = newIncludes;
}
- if (!excludesFile.equals("")) {
+ if (!excludesFile.isEmpty()) {
Set<String> newExcludes = new HashSet<String>();
readFileToSet(excludesFile, newExcludes);
// switch the excluded hosts
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java Fri Oct 19 02:25:55 2012
@@ -204,11 +204,13 @@ public class LineReader {
int startPosn = bufferPosn; //starting from where we left off the last time
if (bufferPosn >= bufferLength) {
startPosn = bufferPosn = 0;
- if (prevCharCR)
+ if (prevCharCR) {
++bytesConsumed; //account for CR from previous read
+ }
bufferLength = in.read(buffer);
- if (bufferLength <= 0)
+ if (bufferLength <= 0) {
break; // EOF
+ }
}
for (; bufferPosn < bufferLength; ++bufferPosn) { //search for newline
if (buffer[bufferPosn] == LF) {
@@ -223,8 +225,9 @@ public class LineReader {
prevCharCR = (buffer[bufferPosn] == CR);
}
int readLength = bufferPosn - startPosn;
- if (prevCharCR && newlineLength == 0)
+ if (prevCharCR && newlineLength == 0) {
--readLength; //CR at the end of the buffer
+ }
bytesConsumed += readLength;
int appendLength = readLength - newlineLength;
if (appendLength > maxLineLength - txtLength) {
@@ -236,8 +239,9 @@ public class LineReader {
}
} while (newlineLength == 0 && bytesConsumed < maxBytesToConsume);
- if (bytesConsumed > (long)Integer.MAX_VALUE)
- throw new IOException("Too many bytes before newline: " + bytesConsumed);
+ if (bytesConsumed > (long)Integer.MAX_VALUE) {
+ throw new IOException("Too many bytes before newline: " + bytesConsumed);
+ }
return (int)bytesConsumed;
}
@@ -246,18 +250,56 @@ public class LineReader {
*/
private int readCustomLine(Text str, int maxLineLength, int maxBytesToConsume)
throws IOException {
+ /* We're reading data from inputStream, but the head of the stream may
+ * already be captured in the previous buffer, so we have several cases:
+ *
+ * 1. The buffer tail does not contain any character sequence that
+ * matches the head of the delimiter. We count it as an
+ * ambiguous byte count = 0
+ *
+ * 2. The buffer tail contains X characters that form a
+ * sequence matching the head of the delimiter. We count
+ * ambiguous byte count = X
+ *
+ * // *** e.g.: a segment of an input file is as follows
+ *
+ * " record 1792: I found this bug very interesting and
+ * I have completely read about it. record 1793: This bug
+ * can be solved easily record 1794: This ."
+ *
+ * delimiter = "record";
+ *
+ * supposing the string at the end of the buffer is
+ * "I found this bug very interesting and I have completely re",
+ * the next buffer is then "ad about it. record 179 ...."
+ *
+ * The matching characters in the input
+ * buffer tail and delimiter head = "re"
+ * Therefore, ambiguous byte count = 2 **** //
+ *
+ * 2.1 If the following bytes are the remaining characters of
+ * the delimiter, then we have to capture only up to the starting
+ * position of the delimiter. That means we need not include the
+ * ambiguous characters in str.
+ *
+ * 2.2 If the following bytes are not the remaining characters of
+ * the delimiter (as mentioned in the example),
+ * then we have to include the ambiguous characters in str.
+ */
str.clear();
int txtLength = 0; // tracks str.getLength(), as an optimization
long bytesConsumed = 0;
int delPosn = 0;
+ int ambiguousByteCount=0; // To capture the ambiguous characters count
do {
- int startPosn = bufferPosn; // starting from where we left off the last
- // time
+ int startPosn = bufferPosn; // Start from previous end position
if (bufferPosn >= bufferLength) {
startPosn = bufferPosn = 0;
bufferLength = in.read(buffer);
- if (bufferLength <= 0)
+ if (bufferLength <= 0) {
+ str.append(recordDelimiterBytes, 0, ambiguousByteCount);
break; // EOF
+ }
}
for (; bufferPosn < bufferLength; ++bufferPosn) {
if (buffer[bufferPosn] == recordDelimiterBytes[delPosn]) {
@@ -266,7 +308,8 @@ public class LineReader {
bufferPosn++;
break;
}
- } else {
+ } else if (delPosn != 0) {
+ bufferPosn--;
delPosn = 0;
}
}
@@ -277,14 +320,27 @@ public class LineReader {
appendLength = maxLineLength - txtLength;
}
if (appendLength > 0) {
+ if (ambiguousByteCount > 0) {
+ str.append(recordDelimiterBytes, 0, ambiguousByteCount);
+ //appending the ambiguous characters (refer case 2.2)
+ bytesConsumed += ambiguousByteCount;
+ ambiguousByteCount=0;
+ }
str.append(buffer, startPosn, appendLength);
txtLength += appendLength;
}
- } while (delPosn < recordDelimiterBytes.length
+ if (bufferPosn >= bufferLength) {
+ if (delPosn > 0 && delPosn < recordDelimiterBytes.length) {
+ ambiguousByteCount = delPosn;
+ bytesConsumed -= ambiguousByteCount; //to be consumed in next
+ }
+ }
+ } while (delPosn < recordDelimiterBytes.length
&& bytesConsumed < maxBytesToConsume);
- if (bytesConsumed > (long) Integer.MAX_VALUE)
+ if (bytesConsumed > (long) Integer.MAX_VALUE) {
throw new IOException("Too many bytes before delimiter: " + bytesConsumed);
- return (int) bytesConsumed;
+ }
+ return (int) bytesConsumed;
}
/**
@@ -296,7 +352,7 @@ public class LineReader {
*/
public int readLine(Text str, int maxLineLength) throws IOException {
return readLine(str, maxLineLength, Integer.MAX_VALUE);
-}
+ }
/**
* Read from the InputStream into the given Text.
@@ -307,5 +363,4 @@ public class LineReader {
public int readLine(Text str) throws IOException {
return readLine(str, Integer.MAX_VALUE, Integer.MAX_VALUE);
}
-
}
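A minimal sketch of the ambiguous-tail case described in the new comment
block (not part of this commit): a custom delimiter ("record") whose head can
straddle a buffer boundary. The deliberately tiny buffer forces the split;
the three-argument LineReader constructor taking a delimiter byte array is
the existing API.

    import java.io.ByteArrayInputStream;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.util.LineReader;

    public class CustomDelimiterSketch {
      public static void main(String[] args) throws Exception {
        String input = "first record second record third";
        LineReader reader = new LineReader(
            new ByteArrayInputStream(input.getBytes("UTF-8")),
            7,                                 // tiny buffer splits "record"
            "record".getBytes("UTF-8"));       // custom record delimiter
        Text line = new Text();
        while (reader.readLine(line) > 0) {
          // Expected fields, with the delimiter stripped:
          // "first ", " second ", " third".
          System.out.println("[" + line + "]");
        }
        reader.close();
      }
    }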
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java Fri Oct 19 02:25:55 2012
@@ -47,7 +47,7 @@ public class NativeCodeLoader {
}
try {
System.loadLibrary("hadoop");
- LOG.info("Loaded the native-hadoop library");
+ LOG.debug("Loaded the native-hadoop library");
nativeCodeLoaded = true;
} catch (Throwable t) {
// Ignore failure to load
@@ -75,6 +75,11 @@ public class NativeCodeLoader {
}
/**
+ * Returns true only if this build was compiled with support for snappy.
+ */
+ public static native boolean buildSupportsSnappy();
+
+ /**
* Return if native hadoop libraries, if present, can be used for this job.
* @param conf configuration
*
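A guard sketch for the new native probe (not part of this commit). Since
buildSupportsSnappy() is a native method, it must only be called once the
native library has loaded; isNativeCodeLoaded() is the existing accessor for
the nativeCodeLoaded flag.

    import org.apache.hadoop.util.NativeCodeLoader;

    public class SnappyGuardSketch {
      public static boolean snappyUsable() {
        // Calling the native probe without the library loaded would throw
        // UnsatisfiedLinkError, so check availability first.
        return NativeCodeLoader.isNativeCodeLoaded()
            && NativeCodeLoader.buildSupportsSnappy();
      }

      public static void main(String[] args) {
        System.out.println("snappy available: " + snappyUsable());
      }
    }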
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java Fri Oct 19 02:25:55 2012
@@ -214,6 +214,7 @@ public class Progress {
this.status = status;
}
+ @Override
public String toString() {
StringBuilder result = new StringBuilder();
toString(result);
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java Fri Oct 19 02:25:55 2012
@@ -46,51 +46,77 @@ public class PureJavaCrc32 implements Ch
reset();
}
- /** {@inheritDoc} */
+ @Override
public long getValue() {
return (~crc) & 0xffffffffL;
}
- /** {@inheritDoc} */
+ @Override
public void reset() {
crc = 0xffffffff;
}
- /** {@inheritDoc} */
+ @Override
public void update(byte[] b, int off, int len) {
int localCrc = crc;
+
while(len > 7) {
- int c0 = b[off++] ^ localCrc;
- int c1 = b[off++] ^ (localCrc >>>= 8);
- int c2 = b[off++] ^ (localCrc >>>= 8);
- int c3 = b[off++] ^ (localCrc >>>= 8);
- localCrc = (T8_7[c0 & 0xff] ^ T8_6[c1 & 0xff])
- ^ (T8_5[c2 & 0xff] ^ T8_4[c3 & 0xff]);
+ final int c0 =(b[off+0] ^ localCrc) & 0xff;
+ final int c1 =(b[off+1] ^ (localCrc >>>= 8)) & 0xff;
+ final int c2 =(b[off+2] ^ (localCrc >>>= 8)) & 0xff;
+ final int c3 =(b[off+3] ^ (localCrc >>>= 8)) & 0xff;
+ localCrc = (T[T8_7_start + c0] ^ T[T8_6_start + c1])
+ ^ (T[T8_5_start + c2] ^ T[T8_4_start + c3]);
+
+ final int c4 = b[off+4] & 0xff;
+ final int c5 = b[off+5] & 0xff;
+ final int c6 = b[off+6] & 0xff;
+ final int c7 = b[off+7] & 0xff;
- localCrc ^= (T8_3[b[off++] & 0xff] ^ T8_2[b[off++] & 0xff])
- ^ (T8_1[b[off++] & 0xff] ^ T8_0[b[off++] & 0xff]);
+ localCrc ^= (T[T8_3_start + c4] ^ T[T8_2_start + c5])
+ ^ (T[T8_1_start + c6] ^ T[T8_0_start + c7]);
+ off += 8;
len -= 8;
}
- while(len > 0) {
- localCrc = (localCrc >>> 8) ^ T8_0[(localCrc ^ b[off++]) & 0xff];
- len--;
+
+ /* loop unroll - Duff's device style */
+ switch(len) {
+ case 7: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 6: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 5: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 4: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 3: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 2: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 1: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ default:
+ /* nothing */
}
// Publish crc out to object
crc = localCrc;
}
- /** {@inheritDoc} */
+ @Override
final public void update(int b) {
- crc = (crc >>> 8) ^ T8_0[(crc ^ b) & 0xff];
+ crc = (crc >>> 8) ^ T[T8_0_start + ((crc ^ b) & 0xff)];
}
/*
* CRC-32 lookup tables generated by the polynomial 0xEDB88320.
* See also TestPureJavaCrc32.Table.
*/
- private static final int[] T8_0 = new int[] {
+ private static final int T8_0_start = 0*256;
+ private static final int T8_1_start = 1*256;
+ private static final int T8_2_start = 2*256;
+ private static final int T8_3_start = 3*256;
+ private static final int T8_4_start = 4*256;
+ private static final int T8_5_start = 5*256;
+ private static final int T8_6_start = 6*256;
+ private static final int T8_7_start = 7*256;
+
+ private static final int[] T = new int[] {
+ /* T8_0 */
0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA,
0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3,
0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988,
@@ -154,9 +180,8 @@ public class PureJavaCrc32 implements Ch
0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6,
0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF,
0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94,
- 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D
- };
- private static final int[] T8_1 = new int[] {
+ 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D,
+ /* T8_1 */
0x00000000, 0x191B3141, 0x32366282, 0x2B2D53C3,
0x646CC504, 0x7D77F445, 0x565AA786, 0x4F4196C7,
0xC8D98A08, 0xD1C2BB49, 0xFAEFE88A, 0xE3F4D9CB,
@@ -220,9 +245,8 @@ public class PureJavaCrc32 implements Ch
0x14BCE1BD, 0x0DA7D0FC, 0x268A833F, 0x3F91B27E,
0x70D024B9, 0x69CB15F8, 0x42E6463B, 0x5BFD777A,
0xDC656BB5, 0xC57E5AF4, 0xEE530937, 0xF7483876,
- 0xB809AEB1, 0xA1129FF0, 0x8A3FCC33, 0x9324FD72
- };
- private static final int[] T8_2 = new int[] {
+ 0xB809AEB1, 0xA1129FF0, 0x8A3FCC33, 0x9324FD72,
+ /* T8_2 */
0x00000000, 0x01C26A37, 0x0384D46E, 0x0246BE59,
0x0709A8DC, 0x06CBC2EB, 0x048D7CB2, 0x054F1685,
0x0E1351B8, 0x0FD13B8F, 0x0D9785D6, 0x0C55EFE1,
@@ -286,9 +310,8 @@ public class PureJavaCrc32 implements Ch
0xB5C473D0, 0xB40619E7, 0xB640A7BE, 0xB782CD89,
0xB2CDDB0C, 0xB30FB13B, 0xB1490F62, 0xB08B6555,
0xBBD72268, 0xBA15485F, 0xB853F606, 0xB9919C31,
- 0xBCDE8AB4, 0xBD1CE083, 0xBF5A5EDA, 0xBE9834ED
- };
- private static final int[] T8_3 = new int[] {
+ 0xBCDE8AB4, 0xBD1CE083, 0xBF5A5EDA, 0xBE9834ED,
+ /* T8_3 */
0x00000000, 0xB8BC6765, 0xAA09C88B, 0x12B5AFEE,
0x8F629757, 0x37DEF032, 0x256B5FDC, 0x9DD738B9,
0xC5B428EF, 0x7D084F8A, 0x6FBDE064, 0xD7018701,
@@ -352,9 +375,8 @@ public class PureJavaCrc32 implements Ch
0x866616A7, 0x3EDA71C2, 0x2C6FDE2C, 0x94D3B949,
0x090481F0, 0xB1B8E695, 0xA30D497B, 0x1BB12E1E,
0x43D23E48, 0xFB6E592D, 0xE9DBF6C3, 0x516791A6,
- 0xCCB0A91F, 0x740CCE7A, 0x66B96194, 0xDE0506F1
- };
- private static final int[] T8_4 = new int[] {
+ 0xCCB0A91F, 0x740CCE7A, 0x66B96194, 0xDE0506F1,
+ /* T8_4 */
0x00000000, 0x3D6029B0, 0x7AC05360, 0x47A07AD0,
0xF580A6C0, 0xC8E08F70, 0x8F40F5A0, 0xB220DC10,
0x30704BC1, 0x0D106271, 0x4AB018A1, 0x77D03111,
@@ -418,9 +440,8 @@ public class PureJavaCrc32 implements Ch
0x4834505D, 0x755479ED, 0x32F4033D, 0x0F942A8D,
0xBDB4F69D, 0x80D4DF2D, 0xC774A5FD, 0xFA148C4D,
0x78441B9C, 0x4524322C, 0x028448FC, 0x3FE4614C,
- 0x8DC4BD5C, 0xB0A494EC, 0xF704EE3C, 0xCA64C78C
- };
- private static final int[] T8_5 = new int[] {
+ 0x8DC4BD5C, 0xB0A494EC, 0xF704EE3C, 0xCA64C78C,
+ /* T8_5 */
0x00000000, 0xCB5CD3A5, 0x4DC8A10B, 0x869472AE,
0x9B914216, 0x50CD91B3, 0xD659E31D, 0x1D0530B8,
0xEC53826D, 0x270F51C8, 0xA19B2366, 0x6AC7F0C3,
@@ -484,9 +505,8 @@ public class PureJavaCrc32 implements Ch
0x15921919, 0xDECECABC, 0x585AB812, 0x93066BB7,
0x8E035B0F, 0x455F88AA, 0xC3CBFA04, 0x089729A1,
0xF9C19B74, 0x329D48D1, 0xB4093A7F, 0x7F55E9DA,
- 0x6250D962, 0xA90C0AC7, 0x2F987869, 0xE4C4ABCC
- };
- private static final int[] T8_6 = new int[] {
+ 0x6250D962, 0xA90C0AC7, 0x2F987869, 0xE4C4ABCC,
+ /* T8_6 */
0x00000000, 0xA6770BB4, 0x979F1129, 0x31E81A9D,
0xF44F2413, 0x52382FA7, 0x63D0353A, 0xC5A73E8E,
0x33EF4E67, 0x959845D3, 0xA4705F4E, 0x020754FA,
@@ -550,9 +570,8 @@ public class PureJavaCrc32 implements Ch
0x647E3AD9, 0xC209316D, 0xF3E12BF0, 0x55962044,
0x90311ECA, 0x3646157E, 0x07AE0FE3, 0xA1D90457,
0x579174BE, 0xF1E67F0A, 0xC00E6597, 0x66796E23,
- 0xA3DE50AD, 0x05A95B19, 0x34414184, 0x92364A30
- };
- private static final int[] T8_7 = new int[] {
+ 0xA3DE50AD, 0x05A95B19, 0x34414184, 0x92364A30,
+ /* T8_7 */
0x00000000, 0xCCAA009E, 0x4225077D, 0x8E8F07E3,
0x844A0EFA, 0x48E00E64, 0xC66F0987, 0x0AC50919,
0xD3E51BB5, 0x1F4F1B2B, 0x91C01CC8, 0x5D6A1C56,
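Since flattening the eight T8_* tables into one array T (indexed via the
T8_n_start offsets) and unrolling the tail loop are pure layout changes, the
CRC values must be unchanged. A quick equivalence check against
java.util.zip.CRC32 (not part of this commit):

    import java.util.Random;
    import java.util.zip.CRC32;
    import org.apache.hadoop.util.PureJavaCrc32;

    public class Crc32EquivalenceCheck {
      public static void main(String[] args) {
        byte[] buf = new byte[1 << 16];
        new Random(42).nextBytes(buf);

        CRC32 jdk = new CRC32();
        PureJavaCrc32 pure = new PureJavaCrc32();
        jdk.update(buf, 0, buf.length);
        pure.update(buf, 0, buf.length);

        if (jdk.getValue() != pure.getValue()) {
          throw new AssertionError("CRC mismatch after table flattening");
        }
        System.out.println("ok: " + Long.toHexString(pure.getValue()));
      }
    }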
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java Fri Oct 19 02:25:55 2012
@@ -42,52 +42,78 @@ public class PureJavaCrc32C implements C
reset();
}
- /** {@inheritDoc} */
+ @Override
public long getValue() {
long ret = crc;
return (~ret) & 0xffffffffL;
}
- /** {@inheritDoc} */
+ @Override
public void reset() {
crc = 0xffffffff;
}
- /** {@inheritDoc} */
+ @Override
public void update(byte[] b, int off, int len) {
int localCrc = crc;
+
while(len > 7) {
- int c0 = b[off++] ^ localCrc;
- int c1 = b[off++] ^ (localCrc >>>= 8);
- int c2 = b[off++] ^ (localCrc >>>= 8);
- int c3 = b[off++] ^ (localCrc >>>= 8);
- localCrc = (T8_7[c0 & 0xff] ^ T8_6[c1 & 0xff])
- ^ (T8_5[c2 & 0xff] ^ T8_4[c3 & 0xff]);
+ final int c0 =(b[off+0] ^ localCrc) & 0xff;
+ final int c1 =(b[off+1] ^ (localCrc >>>= 8)) & 0xff;
+ final int c2 =(b[off+2] ^ (localCrc >>>= 8)) & 0xff;
+ final int c3 =(b[off+3] ^ (localCrc >>>= 8)) & 0xff;
+ localCrc = (T[T8_7_start + c0] ^ T[T8_6_start + c1])
+ ^ (T[T8_5_start + c2] ^ T[T8_4_start + c3]);
+
+ final int c4 = b[off+4] & 0xff;
+ final int c5 = b[off+5] & 0xff;
+ final int c6 = b[off+6] & 0xff;
+ final int c7 = b[off+7] & 0xff;
- localCrc ^= (T8_3[b[off++] & 0xff] ^ T8_2[b[off++] & 0xff])
- ^ (T8_1[b[off++] & 0xff] ^ T8_0[b[off++] & 0xff]);
+ localCrc ^= (T[T8_3_start + c4] ^ T[T8_2_start + c5])
+ ^ (T[T8_1_start + c6] ^ T[T8_0_start + c7]);
+ off += 8;
len -= 8;
}
- while(len > 0) {
- localCrc = (localCrc >>> 8) ^ T8_0[(localCrc ^ b[off++]) & 0xff];
- len--;
+
+ /* loop unroll - Duff's device style */
+ switch(len) {
+ case 7: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 6: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 5: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 4: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 3: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 2: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ case 1: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+ default:
+ /* nothing */
}
// Publish crc out to object
crc = localCrc;
}
- /** {@inheritDoc} */
+ @Override
final public void update(int b) {
- crc = (crc >>> 8) ^ T8_0[(crc ^ b) & 0xff];
+ crc = (crc >>> 8) ^ T[T8_0_start + ((crc ^ b) & 0xff)];
}
// CRC polynomial tables generated by:
// java -cp build/test/classes/:build/classes/ \
// org.apache.hadoop.util.TestPureJavaCrc32\$Table 82F63B78
- static final int[] T8_0 = new int[] {
+ private static final int T8_0_start = 0*256;
+ private static final int T8_1_start = 1*256;
+ private static final int T8_2_start = 2*256;
+ private static final int T8_3_start = 3*256;
+ private static final int T8_4_start = 4*256;
+ private static final int T8_5_start = 5*256;
+ private static final int T8_6_start = 6*256;
+ private static final int T8_7_start = 7*256;
+
+ private static final int[] T = new int[] {
+ /* T8_0 */
0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4,
0xC79A971F, 0x35F1141C, 0x26A1E7E8, 0xD4CA64EB,
0x8AD958CF, 0x78B2DBCC, 0x6BE22838, 0x9989AB3B,
@@ -151,9 +177,8 @@ public class PureJavaCrc32C implements C
0xF36E6F75, 0x0105EC76, 0x12551F82, 0xE03E9C81,
0x34F4F86A, 0xC69F7B69, 0xD5CF889D, 0x27A40B9E,
0x79B737BA, 0x8BDCB4B9, 0x988C474D, 0x6AE7C44E,
- 0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351
- };
- static final int[] T8_1 = new int[] {
+ 0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351,
+ /* T8_1 */
0x00000000, 0x13A29877, 0x274530EE, 0x34E7A899,
0x4E8A61DC, 0x5D28F9AB, 0x69CF5132, 0x7A6DC945,
0x9D14C3B8, 0x8EB65BCF, 0xBA51F356, 0xA9F36B21,
@@ -217,9 +242,8 @@ public class PureJavaCrc32C implements C
0x449A2E7E, 0x5738B609, 0x63DF1E90, 0x707D86E7,
0x0A104FA2, 0x19B2D7D5, 0x2D557F4C, 0x3EF7E73B,
0xD98EEDC6, 0xCA2C75B1, 0xFECBDD28, 0xED69455F,
- 0x97048C1A, 0x84A6146D, 0xB041BCF4, 0xA3E32483
- };
- static final int[] T8_2 = new int[] {
+ 0x97048C1A, 0x84A6146D, 0xB041BCF4, 0xA3E32483,
+ /* T8_2 */
0x00000000, 0xA541927E, 0x4F6F520D, 0xEA2EC073,
0x9EDEA41A, 0x3B9F3664, 0xD1B1F617, 0x74F06469,
0x38513EC5, 0x9D10ACBB, 0x773E6CC8, 0xD27FFEB6,
@@ -283,9 +307,8 @@ public class PureJavaCrc32C implements C
0xDDA47104, 0x78E5E37A, 0x92CB2309, 0x378AB177,
0x437AD51E, 0xE63B4760, 0x0C158713, 0xA954156D,
0xE5F54FC1, 0x40B4DDBF, 0xAA9A1DCC, 0x0FDB8FB2,
- 0x7B2BEBDB, 0xDE6A79A5, 0x3444B9D6, 0x91052BA8
- };
- static final int[] T8_3 = new int[] {
+ 0x7B2BEBDB, 0xDE6A79A5, 0x3444B9D6, 0x91052BA8,
+ /* T8_3 */
0x00000000, 0xDD45AAB8, 0xBF672381, 0x62228939,
0x7B2231F3, 0xA6679B4B, 0xC4451272, 0x1900B8CA,
0xF64463E6, 0x2B01C95E, 0x49234067, 0x9466EADF,
@@ -349,9 +372,8 @@ public class PureJavaCrc32C implements C
0xC747336E, 0x1A0299D6, 0x782010EF, 0xA565BA57,
0xBC65029D, 0x6120A825, 0x0302211C, 0xDE478BA4,
0x31035088, 0xEC46FA30, 0x8E647309, 0x5321D9B1,
- 0x4A21617B, 0x9764CBC3, 0xF54642FA, 0x2803E842
- };
- static final int[] T8_4 = new int[] {
+ 0x4A21617B, 0x9764CBC3, 0xF54642FA, 0x2803E842,
+ /* T8_4 */
0x00000000, 0x38116FAC, 0x7022DF58, 0x4833B0F4,
0xE045BEB0, 0xD854D11C, 0x906761E8, 0xA8760E44,
0xC5670B91, 0xFD76643D, 0xB545D4C9, 0x8D54BB65,
@@ -415,9 +437,8 @@ public class PureJavaCrc32C implements C
0xCD796B76, 0xF56804DA, 0xBD5BB42E, 0x854ADB82,
0x2D3CD5C6, 0x152DBA6A, 0x5D1E0A9E, 0x650F6532,
0x081E60E7, 0x300F0F4B, 0x783CBFBF, 0x402DD013,
- 0xE85BDE57, 0xD04AB1FB, 0x9879010F, 0xA0686EA3
- };
- static final int[] T8_5 = new int[] {
+ 0xE85BDE57, 0xD04AB1FB, 0x9879010F, 0xA0686EA3,
+ /* T8_5 */
0x00000000, 0xEF306B19, 0xDB8CA0C3, 0x34BCCBDA,
0xB2F53777, 0x5DC55C6E, 0x697997B4, 0x8649FCAD,
0x6006181F, 0x8F367306, 0xBB8AB8DC, 0x54BAD3C5,
@@ -481,9 +502,8 @@ public class PureJavaCrc32C implements C
0x57F4CA8E, 0xB8C4A197, 0x8C786A4D, 0x63480154,
0xE501FDF9, 0x0A3196E0, 0x3E8D5D3A, 0xD1BD3623,
0x37F2D291, 0xD8C2B988, 0xEC7E7252, 0x034E194B,
- 0x8507E5E6, 0x6A378EFF, 0x5E8B4525, 0xB1BB2E3C
- };
- static final int[] T8_6 = new int[] {
+ 0x8507E5E6, 0x6A378EFF, 0x5E8B4525, 0xB1BB2E3C,
+ /* T8_6 */
0x00000000, 0x68032CC8, 0xD0065990, 0xB8057558,
0xA5E0C5D1, 0xCDE3E919, 0x75E69C41, 0x1DE5B089,
0x4E2DFD53, 0x262ED19B, 0x9E2BA4C3, 0xF628880B,
@@ -547,9 +567,8 @@ public class PureJavaCrc32C implements C
0x2ED97095, 0x46DA5C5D, 0xFEDF2905, 0x96DC05CD,
0x8B39B544, 0xE33A998C, 0x5B3FECD4, 0x333CC01C,
0x60F48DC6, 0x08F7A10E, 0xB0F2D456, 0xD8F1F89E,
- 0xC5144817, 0xAD1764DF, 0x15121187, 0x7D113D4F
- };
- static final int[] T8_7 = new int[] {
+ 0xC5144817, 0xAD1764DF, 0x15121187, 0x7D113D4F,
+ /* T8_7 */
0x00000000, 0x493C7D27, 0x9278FA4E, 0xDB448769,
0x211D826D, 0x6821FF4A, 0xB3657823, 0xFA590504,
0x423B04DA, 0x0B0779FD, 0xD043FE94, 0x997F83B3,
@@ -613,6 +632,6 @@ public class PureJavaCrc32C implements C
0xA777317B, 0xEE4B4C5C, 0x350FCB35, 0x7C33B612,
0x866AB316, 0xCF56CE31, 0x14124958, 0x5D2E347F,
0xE54C35A1, 0xAC704886, 0x7734CFEF, 0x3E08B2C8,
- 0xC451B7CC, 0x8D6DCAEB, 0x56294D82, 0x1F1530A5
+ 0xC451B7CC, 0x8D6DCAEB, 0x56294D82, 0x1F1530A5
};
}
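
The consolidation above replaces eight separate 256-entry lookup tables (T8_0 .. T8_7) with a single flat array T plus precomputed slice offsets, so every table lookup becomes an index into one array. A self-contained toy illustrating just the layout change (the two-entry tables here are made up; the real slices are the 256-entry CRC32C tables above):

    public class FlatTableDemo {
      // Old layout: one array per slice.
      static final int[] T8_0 = {10, 11};
      static final int[] T8_1 = {20, 21};
      // New layout: one flat array plus a start offset per slice.
      static final int[] T = {10, 11, 20, 21};
      static final int T8_1_start = 1 * 2;   // slice size is 2 in this toy

      public static void main(String[] args) {
        int b = 1;
        System.out.println(T8_1[b] == T[T8_1_start + b]);   // true
      }
    }

Keeping all slices in one array also means a single static field load and one bounds-check base inside the hot update loop, which is presumably the motivation for the change.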
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java Fri Oct 19 02:25:55 2012
@@ -52,13 +52,12 @@ public final class QuickSort implements
* {@inheritDoc} If the recursion depth falls below {@link #getMaxDepth},
* then switch to {@link HeapSort}.
*/
+ @Override
public void sort(IndexedSortable s, int p, int r) {
sort(s, p, r, null);
}
- /**
- * {@inheritDoc}
- */
+ @Override
public void sort(final IndexedSortable s, int p, int r,
final Progressable rep) {
sortInternal(s, p, r, rep, getMaxDepth(r - p));
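
The replaced {@inheritDoc} comment documented an override without verifying one; @Override makes the compiler prove the method actually overrides a supertype method. A minimal sketch of the failure mode the annotation catches (class names here are hypothetical):

    class Sorter {
      public void sort(int[] a, int p, int r) { /* ... */ }
    }

    class CheckedSorter extends Sorter {
      @Override
      public void sort(int[] a, int p, int r) { /* ok: really overrides */ }
      // Uncommenting this fails to compile: "method does not override or
      // implement a method from a supertype".
      // @Override
      // public void sort(long[] a, int p, int r) { }
    }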
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java Fri Oct 19 02:25:55 2012
@@ -205,7 +205,7 @@ public class ReflectionUtils {
boolean dumpStack = false;
if (log.isInfoEnabled()) {
synchronized (ReflectionUtils.class) {
- long now = System.currentTimeMillis();
+ long now = Time.now();
if (now - previousLogTime >= minInterval * 1000) {
previousLogTime = now;
dumpStack = true;
@@ -257,6 +257,7 @@ public class ReflectionUtils {
*/
private static ThreadLocal<CopyInCopyOutBuffer> cloneBuffers
= new ThreadLocal<CopyInCopyOutBuffer>() {
+ @Override
protected synchronized CopyInCopyOutBuffer initialValue() {
return new CopyInCopyOutBuffer();
}
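
Routing timestamps through Time.now() instead of calling System.currentTimeMillis() directly gives the codebase a single clock seam. A hedged sketch of the wrapper shape (the real Time.java ships elsewhere in hadoop-common; this is only the idea, not its exact source):

    public final class Time {
      private Time() {}

      /** Current wall-clock time in milliseconds. */
      public static long now() {
        return System.currentTimeMillis();
      }
    }

With the clock behind one static method, rate-limited paths like the minInterval logging gate above can be exercised in tests without real waiting.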
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java Fri Oct 19 02:25:55 2012
@@ -30,7 +30,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
/**
* A base class for running a Unix command.
@@ -124,7 +123,7 @@ abstract public class Shell {
/** check to see if a command needs to be executed and execute if needed */
protected void run() throws IOException {
- if (lastTime + interval > System.currentTimeMillis())
+ if (lastTime + interval > Time.now())
return;
exitCode = 0; // reset for next run
runCommand();
@@ -223,7 +222,7 @@ abstract public class Shell {
LOG.warn("Error while closing the error stream", ioe);
}
process.destroy();
- lastTime = System.currentTimeMillis();
+ lastTime = Time.now();
}
}
@@ -323,10 +322,12 @@ abstract public class Shell {
this.run();
}
+ @Override
public String[] getExecString() {
return command;
}
+ @Override
protected void parseExecResult(BufferedReader lines) throws IOException {
output = new StringBuffer();
char[] buf = new char[512];
@@ -348,6 +349,7 @@ abstract public class Shell {
*
* @return a string representation of the object.
*/
+ @Override
public String toString() {
StringBuilder builder = new StringBuilder();
String[] args = getExecString();
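
The gate at the top of run() skips the command entirely while the previous result is still fresh. The same pattern in isolation (field names mirror the diff; the command body is hypothetical, and plain System.currentTimeMillis() stands in for Time.now() so the sketch is self-contained):

    class IntervalGated {
      private long lastTime;         // when the command last actually ran
      private final long interval;   // minimum ms between real executions

      IntervalGated(long intervalMs) {
        this.interval = intervalMs;
      }

      void run() {
        if (lastTime + interval > System.currentTimeMillis()) {
          return;                    // within the interval: reuse cached output
        }
        // ... runCommand() would execute here ...
        lastTime = System.currentTimeMillis();
      }
    }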
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java Fri Oct 19 02:25:55 2012
@@ -34,6 +34,7 @@ import java.util.List;
import java.util.Locale;
import java.util.StringTokenizer;
+import com.google.common.net.InetAddresses;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
@@ -77,6 +78,9 @@ public class StringUtils {
* @return the hostname to the first dot
*/
public static String simpleHostname(String fullHostname) {
+ if (InetAddresses.isInetAddress(fullHostname)) {
+ return fullHostname;
+ }
int offset = fullHostname.indexOf('.');
if (offset != -1) {
return fullHostname.substring(0, offset);
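
The new InetAddresses guard changes behavior for dotted-quad inputs, which the old first-dot truncation mangled. A usage sketch (assumes hadoop-common and its Guava dependency on the classpath):

    import org.apache.hadoop.util.StringUtils;

    public class SimpleHostnameDemo {
      public static void main(String[] args) {
        // Hostnames are still truncated at the first dot:
        System.out.println(StringUtils.simpleHostname("foo.example.com")); // foo
        // IP literals now pass through whole; the old code returned "10" here:
        System.out.println(StringUtils.simpleHostname("10.18.52.1"));      // 10.18.52.1
      }
    }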
@@ -202,8 +206,12 @@ public class StringUtils {
}
/**
- *
* @param str
+ * The string array to be parsed into a URI array.
+ * @return <tt>null</tt> if str is <tt>null</tt>, else the URI array
+ * equivalent to str.
+ * @throws IllegalArgumentException
+ * If any string in str violates RFC 2396.
*/
public static URI[] stringToURI(String[] str){
if (str == null)
@@ -213,9 +221,8 @@ public class StringUtils {
try{
uris[i] = new URI(str[i]);
}catch(URISyntaxException ur){
- System.out.println("Exception in specified URI's " + StringUtils.stringifyException(ur));
- //making sure its asssigned to null in case of an error
- uris[i] = null;
+ throw new IllegalArgumentException(
+ "Failed to create uri for " + str[i], ur);
}
}
return uris;
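
The error contract changes from printing the exception and leaving a null slot in the result to failing fast with IllegalArgumentException, so callers that silently tolerated null entries now need a catch block. A usage sketch:

    import java.net.URI;
    import org.apache.hadoop.util.StringUtils;

    public class StringToUriDemo {
      public static void main(String[] args) {
        try {
          URI[] uris = StringUtils.stringToURI(
              new String[] { "hdfs://nn:8020/", "bad uri" });  // space is illegal
          System.out.println(uris.length);
        } catch (IllegalArgumentException e) {
          // Previously: a log line on stdout and uris[1] == null.
          System.err.println("Rejected early: " + e.getMessage());
        }
      }
    }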
@@ -345,7 +352,7 @@ public class StringUtils {
* @return an array of <code>String</code> values
*/
public static String[] getTrimmedStrings(String str){
- if (null == str || "".equals(str.trim())) {
+ if (null == str || str.trim().isEmpty()) {
return emptyStringArray;
}
@@ -405,7 +412,7 @@ public class StringUtils {
String str, char separator) {
// String.split returns a single empty result for splitting the empty
// string.
- if ("".equals(str)) {
+ if (str.isEmpty()) {
return new String[]{""};
}
ArrayList<String> strList = new ArrayList<String>();
@@ -601,7 +608,8 @@ public class StringUtils {
" build = " + VersionInfo.getUrl() + " -r "
+ VersionInfo.getRevision()
+ "; compiled by '" + VersionInfo.getUser()
- + "' on " + VersionInfo.getDate()}
+ + "' on " + VersionInfo.getDate(),
+ " java = " + System.getProperty("java.version") }
)
);
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java Fri Oct 19 02:25:55 2012
@@ -35,10 +35,10 @@ public class ThreadUtil {
* @param millis the number of milliseconds for the current thread to sleep
*/
public static void sleepAtLeastIgnoreInterrupts(long millis) {
- long start = System.currentTimeMillis();
- while (System.currentTimeMillis() - start < millis) {
+ long start = Time.now();
+ while (Time.now() - start < millis) {
long timeToSleep = millis -
- (System.currentTimeMillis() - start);
+ (Time.now() - start);
try {
Thread.sleep(timeToSleep);
} catch (InterruptedException ie) {
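
The loop exists because Thread.sleep can return early when the thread is interrupted; the helper goes back to sleep for the remainder instead of giving up. A small runnable demonstration of the early wakeup being defended against:

    public class EarlyWakeupDemo {
      public static void main(String[] args) throws Exception {
        Thread sleeper = new Thread(() -> {
          long start = System.currentTimeMillis();
          try {
            Thread.sleep(5000);
          } catch (InterruptedException ie) {
            System.out.println("woke after ~"
                + (System.currentTimeMillis() - start) + " ms, not 5000");
          }
        });
        sleeper.start();
        Thread.sleep(100);
        sleeper.interrupt();   // sleepAtLeastIgnoreInterrupts would re-sleep here
        sleeper.join();
      }
    }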
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java Fri Oct 19 02:25:55 2012
@@ -193,6 +193,7 @@ public abstract class Filter implements
// Writable interface
+ @Override
public void write(DataOutput out) throws IOException {
out.writeInt(VERSION);
out.writeInt(this.nbHash);
@@ -200,6 +201,7 @@ public abstract class Filter implements
out.writeInt(this.vectorSize);
}
+ @Override
public void readFields(DataInput in) throws IOException {
int ver = in.readInt();
if (ver > 0) { // old unversioned format
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java Fri Oct 19 02:25:55 2012
@@ -154,12 +154,14 @@ public class Key implements WritableComp
// Writable
+ @Override
public void write(DataOutput out) throws IOException {
out.writeInt(bytes.length);
out.write(bytes);
out.writeDouble(weight);
}
+ @Override
public void readFields(DataInput in) throws IOException {
this.bytes = new byte[in.readInt()];
in.readFully(this.bytes);
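
The annotated write/readFields pair is Hadoop's Writable contract: fields go out and come back in the same order over DataOutput/DataInput. A round-trip sketch (constructor and equals behavior assumed from the Key API of this era; worth confirming against Key.java itself):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.util.bloom.Key;

    public class WritableRoundTrip {
      public static void main(String[] args) throws IOException {
        Key original = new Key("row-17".getBytes("UTF-8"), 2.0);
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));  // length, bytes, weight
        Key copy = new Key();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(original.equals(copy));    // true
      }
    }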
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java Fri Oct 19 02:25:55 2012
@@ -81,6 +81,7 @@ public class JenkinsHash extends Hash {
* <p>Use for hash table lookup, or anything where one collision in 2^^32 is
* acceptable. Do NOT use for cryptographic purposes.
*/
+ @Override
@SuppressWarnings("fallthrough")
public int hash(byte[] key, int nbytes, int initval) {
int length = nbytes;
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java Fri Oct 19 02:25:55 2012
@@ -37,6 +37,7 @@ public class MurmurHash extends Hash {
return _instance;
}
+ @Override
public int hash(byte[] data, int length, int seed) {
int m = 0x5bd1e995;
int r = 24;
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c Fri Oct 19 02:25:55 2012
@@ -24,7 +24,7 @@
// Simple Functions
//****************************
-extern int LZ4_compress (char* source, char* dest, int isize);
+extern int LZ4_compress (const char* source, char* dest, int isize);
/*
LZ4_compress() :
@@ -88,7 +88,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
compressed_direct_buf_len = LZ4_compress(uncompressed_bytes, compressed_bytes, uncompressed_direct_buf_len);
if (compressed_direct_buf_len < 0){
- THROW(env, "Ljava/lang/InternalError", "LZ4_compress failed");
+ THROW(env, "java/lang/InternalError", "LZ4_compress failed");
}
(*env)->SetIntField(env, thisj, Lz4Compressor_uncompressedDirectBufLen, 0);
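
The corrected exception name matters because JNI's FindClass, which the THROW macro relies on, takes the slash-separated internal form with no "L" prefix; the "L...;" wrapper belongs only inside field and method descriptors, and the old string was not even a complete descriptor (no trailing ';'), so the class lookup would fail and mask the intended InternalError. The three name forms, seen from the Java side:

    public class JniNameForms {
      public static void main(String[] args) throws Exception {
        Class<?> c = Class.forName("java.lang.InternalError"); // binary name
        String internal = c.getName().replace('.', '/');       // what FindClass wants
        String descriptor = "L" + internal + ";";              // signatures only
        System.out.println(internal);    // java/lang/InternalError
        System.out.println(descriptor);  // Ljava/lang/InternalError;
      }
    }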
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c Fri Oct 19 02:25:55 2012
@@ -20,7 +20,7 @@
#include "org_apache_hadoop.h"
#include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h"
-int LZ4_uncompress_unknownOutputSize (char* source, char* dest, int isize, int maxOutputSize);
+int LZ4_uncompress_unknownOutputSize(const char* source, char* dest, int isize, int maxOutputSize);
/*
LZ4_uncompress_unknownOutputSize() :
@@ -85,7 +85,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
uncompressed_direct_buf_len = LZ4_uncompress_unknownOutputSize(compressed_bytes, uncompressed_bytes, compressed_direct_buf_len, uncompressed_direct_buf_len);
if (uncompressed_direct_buf_len < 0) {
- THROW(env, "Ljava/lang/InternalError", "LZ4_uncompress_unknownOutputSize failed.");
+ THROW(env, "java/lang/InternalError", "LZ4_uncompress_unknownOutputSize failed.");
}
(*env)->SetIntField(env, thisj, Lz4Decompressor_compressedDirectBufLen, 0);
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c Fri Oct 19 02:25:55 2012
@@ -25,6 +25,8 @@
#include "org_apache_hadoop_io_compress_snappy.h"
#include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
+#define JINT_MAX 0x7fffffff
+
static jfieldID SnappyCompressor_clazz;
static jfieldID SnappyCompressor_uncompressedDirectBuf;
static jfieldID SnappyCompressor_uncompressedDirectBufLen;
@@ -39,7 +41,7 @@ JNIEXPORT void JNICALL Java_org_apache_h
// Load libsnappy.so
void *libsnappy = dlopen(HADOOP_SNAPPY_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
if (!libsnappy) {
- char* msg = (char*)malloc(1000);
+ char msg[1000];
snprintf(msg, 1000, "%s (%s)!", "Cannot load " HADOOP_SNAPPY_LIBRARY, dlerror());
THROW(env, "java/lang/UnsatisfiedLinkError", msg);
return;
@@ -71,6 +73,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
jint uncompressed_direct_buf_len = (*env)->GetIntField(env, thisj, SnappyCompressor_uncompressedDirectBufLen);
jobject compressed_direct_buf = (*env)->GetObjectField(env, thisj, SnappyCompressor_compressedDirectBuf);
jint compressed_direct_buf_len = (*env)->GetIntField(env, thisj, SnappyCompressor_directBufferSize);
+ size_t buf_len;
// Get the input direct buffer
LOCK_CLASS(env, clazz, "SnappyCompressor");
@@ -78,7 +81,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
UNLOCK_CLASS(env, clazz, "SnappyCompressor");
if (uncompressed_bytes == 0) {
- return (jint)0;
+ return 0;
}
// Get the output direct buffer
@@ -87,15 +90,22 @@ JNIEXPORT jint JNICALL Java_org_apache_h
UNLOCK_CLASS(env, clazz, "SnappyCompressor");
if (compressed_bytes == 0) {
- return (jint)0;
+ return 0;
}
- snappy_status ret = dlsym_snappy_compress(uncompressed_bytes, uncompressed_direct_buf_len, compressed_bytes, &compressed_direct_buf_len);
+ /* size_t should always be 4 bytes or larger. */
+ buf_len = (size_t)compressed_direct_buf_len;
+ snappy_status ret = dlsym_snappy_compress(uncompressed_bytes,
+ uncompressed_direct_buf_len, compressed_bytes, &buf_len);
if (ret != SNAPPY_OK){
- THROW(env, "Ljava/lang/InternalError", "Could not compress data. Buffer length is too small.");
+ THROW(env, "java/lang/InternalError", "Could not compress data. Buffer length is too small.");
+ return 0;
+ }
+ if (buf_len > JINT_MAX) {
+ THROW(env, "java/lang/InternalError", "Invalid return buffer length.");
+ return 0;
}
(*env)->SetIntField(env, thisj, SnappyCompressor_uncompressedDirectBufLen, 0);
-
- return (jint)compressed_direct_buf_len;
+ return (jint)buf_len;
}
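
Two fixes land together here: snappy reports the compressed size through a size_t, so passing a pointer to the 4-byte jint field risked an out-of-bounds write on LP64 platforms, hence the new size_t local; and the result is range-checked against JINT_MAX before narrowing back. The same checked narrowing, expressed in pure Java for illustration:

    public class CheckedNarrow {
      static int toJint(long bufLen) {
        if (bufLen < 0 || bufLen > Integer.MAX_VALUE) {
          throw new InternalError("Invalid return buffer length: " + bufLen);
        }
        return (int) bufLen;   // safe: proven to fit in 32 bits
      }

      public static void main(String[] args) {
        System.out.println(toJint(42));   // 42
        try {
          toJint(1L << 33);               // larger than any jint
        } catch (InternalError expected) {
          System.out.println(expected.getMessage());
        }
      }
    }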
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c Fri Oct 19 02:25:55 2012
@@ -92,11 +92,11 @@ JNIEXPORT jint JNICALL Java_org_apache_h
snappy_status ret = dlsym_snappy_uncompress(compressed_bytes, compressed_direct_buf_len, uncompressed_bytes, &uncompressed_direct_buf_len);
if (ret == SNAPPY_BUFFER_TOO_SMALL){
- THROW(env, "Ljava/lang/InternalError", "Could not decompress data. Buffer length is too small.");
+ THROW(env, "java/lang/InternalError", "Could not decompress data. Buffer length is too small.");
} else if (ret == SNAPPY_INVALID_INPUT){
- THROW(env, "Ljava/lang/InternalError", "Could not decompress data. Input is invalid.");
+ THROW(env, "java/lang/InternalError", "Could not decompress data. Input is invalid.");
} else if (ret != SNAPPY_OK){
- THROW(env, "Ljava/lang/InternalError", "Could not decompress data.");
+ THROW(env, "java/lang/InternalError", "Could not decompress data.");
}
(*env)->SetIntField(env, thisj, SnappyDecompressor_compressedDirectBufLen, 0);
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Fri Oct 19 02:25:55 2012
@@ -16,6 +16,8 @@
* limitations under the License.
*/
+#define _GNU_SOURCE
+
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
@@ -366,23 +368,15 @@ Java_org_apache_hadoop_io_nativeio_Nativ
*/
static void throw_ioe(JNIEnv* env, int errnum)
{
- const char* message;
- char buffer[80];
+ char message[80];
jstring jstr_message;
- buffer[0] = 0;
-#ifdef STRERROR_R_CHAR_P
- // GNU strerror_r
- message = strerror_r(errnum, buffer, sizeof(buffer));
- assert (message != NULL);
-#else
- int ret = strerror_r(errnum, buffer, sizeof(buffer));
- if (ret == 0) {
- message = buffer;
+ if ((errnum >= 0) && (errnum < sys_nerr)) {
+ snprintf(message, sizeof(message), "%s", sys_errlist[errnum]);
} else {
- message = "Unknown error";
+ snprintf(message, sizeof(message), "Unknown error %d", errnum);
}
-#endif
+
jobject errno_obj = errno_to_enum(env, errnum);
if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL)
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c Fri Oct 19 02:25:55 2012
@@ -40,8 +40,8 @@ Java_org_apache_hadoop_security_JniBased
(JNIEnv *env, jobject jobj, jstring juser) {
extern int getGroupIDList(const char *user, int *ngroups, gid_t **groups);
extern int getGroupDetails(gid_t group, char **grpBuf);
-
- jobjectArray jgroups;
+ const char *cuser = NULL;
+ jobjectArray jgroups = NULL;
int error = -1;
if (emptyGroups == NULL) {
@@ -56,7 +56,7 @@ Java_org_apache_hadoop_security_JniBased
}
}
char *grpBuf = NULL;
- const char *cuser = (*env)->GetStringUTFChars(env, juser, NULL);
+ cuser = (*env)->GetStringUTFChars(env, juser, NULL);
if (cuser == NULL) {
goto cleanup;
}