You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ss...@apache.org on 2012/10/16 02:03:31 UTC
svn commit: r1398581 [10/14] - in
/hadoop/common/branches/MR-3902/hadoop-common-project:
hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/
hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/
hadoop-auth/sr...
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java Tue Oct 16 00:02:55 2012
@@ -138,6 +138,7 @@ public class TokenMgrError extends Error
*
* from this method for such cases in the release version of your parser.
*/
+ @Override
public String getMessage() {
return super.getMessage();
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java Tue Oct 16 00:02:55 2012
@@ -69,6 +69,7 @@ public class FieldTypeInfo
/**
* Two FieldTypeInfos are equal if each of their fields matches
*/
+ @Override
public boolean equals(Object o) {
if (this == o)
return true;
@@ -87,6 +88,7 @@ public class FieldTypeInfo
* We use a basic hashcode implementation, since this class will likely not
* be used as a hashmap key
*/
+ @Override
public int hashCode() {
return 37*17+typeID.hashCode() + 37*17+fieldID.hashCode();
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java Tue Oct 16 00:02:55 2012
@@ -19,8 +19,6 @@
package org.apache.hadoop.record.meta;
import java.io.IOException;
-import java.util.*;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.record.RecordOutput;
@@ -58,6 +56,7 @@ public class MapTypeID extends TypeID {
return this.typeIDValue;
}
+ @Override
void write(RecordOutput rout, String tag) throws IOException {
rout.writeByte(typeVal, tag);
typeIDKey.write(rout, tag);
@@ -68,6 +67,7 @@ public class MapTypeID extends TypeID {
* Two map typeIDs are equal if their constituent elements have the
* same type
*/
+ @Override
public boolean equals(Object o) {
if (!super.equals(o))
return false;
@@ -82,6 +82,7 @@ public class MapTypeID extends TypeID {
* We use a basic hashcode implementation, since this class will likely not
* be used as a hashmap key
*/
+ @Override
public int hashCode() {
return 37*17+typeIDKey.hashCode() + 37*17+typeIDValue.hashCode();
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java Tue Oct 16 00:02:55 2012
@@ -122,6 +122,7 @@ public class RecordTypeInfo extends org.
/**
* Serialize the type information for a record
*/
+ @Override
public void serialize(RecordOutput rout, String tag) throws IOException {
// write out any header, version info, here
rout.startRecord(this, tag);
@@ -133,6 +134,7 @@ public class RecordTypeInfo extends org.
/**
* Deserialize the type information for a record
*/
+ @Override
public void deserialize(RecordInput rin, String tag) throws IOException {
// read in any header, version info
rin.startRecord(tag);
@@ -148,6 +150,7 @@ public class RecordTypeInfo extends org.
* So we always throw an exception.
* Not implemented. Always returns 0 if another RecordTypeInfo is passed in.
*/
+ @Override
public int compareTo (final Object peer_) throws ClassCastException {
if (!(peer_ instanceof RecordTypeInfo)) {
throw new ClassCastException("Comparing different types of records.");
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java Tue Oct 16 00:02:55 2012
@@ -72,6 +72,7 @@ public class StructTypeID extends TypeID
return null;
}
+ @Override
void write(RecordOutput rout, String tag) throws IOException {
rout.writeByte(typeVal, tag);
writeRest(rout, tag);
@@ -155,9 +156,11 @@ public class StructTypeID extends TypeID
}
}
+ @Override
public boolean equals(Object o) {
return super.equals(o);
}
+ @Override
public int hashCode() { return super.hashCode(); }
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java Tue Oct 16 00:02:55 2012
@@ -89,6 +89,7 @@ public class TypeID {
/**
* Two base typeIDs are equal if they refer to the same type
*/
+ @Override
public boolean equals(Object o) {
if (this == o)
return true;
@@ -107,6 +108,7 @@ public class TypeID {
* We use a basic hashcode implementation, since this class will likely not
* be used as a hashmap key
*/
+ @Override
public int hashCode() {
// See 'Effective Java' by Joshua Bloch
return 37*17+(int)typeVal;
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java Tue Oct 16 00:02:55 2012
@@ -43,6 +43,7 @@ public class VectorTypeID extends TypeID
return this.typeIDElement;
}
+ @Override
void write(RecordOutput rout, String tag) throws IOException {
rout.writeByte(typeVal, tag);
typeIDElement.write(rout, tag);
@@ -52,6 +53,7 @@ public class VectorTypeID extends TypeID
* Two vector typeIDs are equal if their constituent elements have the
* same type
*/
+ @Override
public boolean equals(Object o) {
if (!super.equals (o))
return false;
@@ -64,6 +66,7 @@ public class VectorTypeID extends TypeID
* We use a basic hashcode implementation, since this class will likely not
* be used as a hashmap key
*/
+ @Override
public int hashCode() {
return 37*17+typeIDElement.hashCode();
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java Tue Oct 16 00:02:55 2012
@@ -22,7 +22,6 @@ import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.ipc.VersionedProtocol;
import org.apache.hadoop.security.KerberosInfo;
/**
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java Tue Oct 16 00:02:55 2012
@@ -189,6 +189,7 @@ public class SaslInputStream extends Inp
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public int read() throws IOException {
if (!useWrap) {
return inStream.read();
@@ -220,6 +221,7 @@ public class SaslInputStream extends Inp
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
@@ -242,6 +244,7 @@ public class SaslInputStream extends Inp
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public int read(byte[] b, int off, int len) throws IOException {
if (!useWrap) {
return inStream.read(b, off, len);
@@ -286,6 +289,7 @@ public class SaslInputStream extends Inp
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public long skip(long n) throws IOException {
if (!useWrap) {
return inStream.skip(n);
@@ -312,6 +316,7 @@ public class SaslInputStream extends Inp
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public int available() throws IOException {
if (!useWrap) {
return inStream.available();
@@ -329,6 +334,7 @@ public class SaslInputStream extends Inp
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public void close() throws IOException {
disposeSasl();
ostart = 0;
@@ -344,6 +350,7 @@ public class SaslInputStream extends Inp
* @return <code>false</code>, since this class does not support the
* <code>mark</code> and <code>reset</code> methods.
*/
+ @Override
public boolean markSupported() {
return false;
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java Tue Oct 16 00:02:55 2012
@@ -19,9 +19,7 @@
package org.apache.hadoop.security;
import java.io.BufferedOutputStream;
-import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
@@ -122,6 +120,7 @@ public class SaslOutputStream extends Ou
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public void write(int b) throws IOException {
if (!useWrap) {
outStream.write(b);
@@ -146,6 +145,7 @@ public class SaslOutputStream extends Ou
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public void write(byte[] b) throws IOException {
write(b, 0, b.length);
}
@@ -163,6 +163,7 @@ public class SaslOutputStream extends Ou
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public void write(byte[] inBuf, int off, int len) throws IOException {
if (!useWrap) {
outStream.write(inBuf, off, len);
@@ -197,6 +198,7 @@ public class SaslOutputStream extends Ou
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public void flush() throws IOException {
outStream.flush();
}
@@ -208,6 +210,7 @@ public class SaslOutputStream extends Ou
* @exception IOException
* if an I/O error occurs.
*/
+ @Override
public void close() throws IOException {
disposeSasl();
outStream.close();
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java Tue Oct 16 00:02:55 2012
@@ -239,6 +239,7 @@ public class SaslRpcClient {
this.userPassword = SaslRpcServer.encodePassword(token.getPassword());
}
+ @Override
public void handle(Callback[] callbacks)
throws UnsupportedCallbackException {
NameCallback nc = null;
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java Tue Oct 16 00:02:55 2012
@@ -194,7 +194,6 @@ public class SaslRpcServer {
return encodePassword(secretManager.retrievePassword(tokenid));
}
- /** {@inheritDoc} */
@Override
public void handle(Callback[] callbacks) throws InvalidToken,
UnsupportedCallbackException {
@@ -253,7 +252,6 @@ public class SaslRpcServer {
@InterfaceStability.Evolving
public static class SaslGssCallbackHandler implements CallbackHandler {
- /** {@inheritDoc} */
@Override
public void handle(Callback[] callbacks) throws
UnsupportedCallbackException {
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Tue Oct 16 00:02:55 2012
@@ -25,6 +25,7 @@ import java.net.URLConnection;
import java.net.UnknownHostException;
import java.security.AccessController;
import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.List;
import java.util.ServiceLoader;
@@ -212,7 +213,7 @@ public class SecurityUtil {
private static String replacePattern(String[] components, String hostname)
throws IOException {
String fqdn = hostname;
- if (fqdn == null || fqdn.equals("") || fqdn.equals("0.0.0.0")) {
+ if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
fqdn = getLocalHostName();
}
return components[0] + "/" + fqdn.toLowerCase() + "@" + components[2];
@@ -451,6 +452,41 @@ public class SecurityUtil {
return action.run();
}
}
+
+ /**
+ * Perform the given action as the daemon's login user. If an
+ * InterruptedException is thrown, it is converted to an IOException.
+ *
+ * @param action the action to perform
+ * @return the result of the action
+ * @throws IOException in the event of error
+ */
+ public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
+ throws IOException {
+ return doAsUser(UserGroupInformation.getLoginUser(), action);
+ }
+
+ /**
+ * Perform the given action as the daemon's current user. If an
+ * InterruptedException is thrown, it is converted to an IOException.
+ *
+ * @param action the action to perform
+ * @return the result of the action
+ * @throws IOException in the event of error
+ */
+ public static <T> T doAsCurrentUser(PrivilegedExceptionAction<T> action)
+ throws IOException {
+ return doAsUser(UserGroupInformation.getCurrentUser(), action);
+ }
+
+ private static <T> T doAsUser(UserGroupInformation ugi,
+ PrivilegedExceptionAction<T> action) throws IOException {
+ try {
+ return ugi.doAs(action);
+ } catch (InterruptedException ie) {
+ throw new IOException(ie);
+ }
+ }
/**
* Open a (if need be) secure connection to a URL in a secure environment
@@ -463,7 +499,7 @@ public class SecurityUtil {
* @throws IOException If unable to authenticate via SPNEGO
*/
public static URLConnection openSecureHttpConnection(URL url) throws IOException {
- if(!UserGroupInformation.isSecurityEnabled()) {
+ if (!HttpConfig.isSecure() && !UserGroupInformation.isSecurityEnabled()) {
return url.openConnection();
}
@@ -498,6 +534,7 @@ public class SecurityUtil {
* Uses standard java host resolution
*/
static class StandardHostResolver implements HostResolver {
+ @Override
public InetAddress getByName(String host) throws UnknownHostException {
return InetAddress.getByName(host);
}
@@ -542,6 +579,7 @@ public class SecurityUtil {
* @return InetAddress with the fully qualified hostname or ip
* @throws UnknownHostException if host does not exist
*/
+ @Override
public InetAddress getByName(String host) throws UnknownHostException {
InetAddress addr = null;
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java Tue Oct 16 00:02:55 2012
@@ -20,10 +20,7 @@ package org.apache.hadoop.security;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
-import java.util.Map;
import java.util.StringTokenizer;
-import java.util.concurrent.ConcurrentHashMap;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java Tue Oct 16 00:02:55 2012
@@ -20,12 +20,6 @@ package org.apache.hadoop.security;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.StringTokenizer;
-import java.util.concurrent.ConcurrentHashMap;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Tue Oct 16 00:02:55 2012
@@ -18,8 +18,9 @@
package org.apache.hadoop.security;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
-import java.io.File;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.AccessControlContext;
@@ -28,12 +29,10 @@ import java.security.Principal;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -55,6 +54,7 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
@@ -194,13 +194,12 @@ public class UserGroupInformation {
private static boolean useKerberos;
/** Server-side groups fetching service */
private static Groups groups;
+ /** Min time (in seconds) before relogin for Kerberos */
+ private static long kerberosMinSecondsBeforeRelogin;
/** The configuration to use */
private static Configuration conf;
- /** Leave 10 minutes between relogin attempts. */
- private static final long MIN_TIME_BEFORE_RELOGIN = 10 * 60 * 1000L;
-
/**Environment variable pointing to the token cache file*/
public static final String HADOOP_TOKEN_FILE_LOCATION =
"HADOOP_TOKEN_FILE_LOCATION";
@@ -247,6 +246,16 @@ public class UserGroupInformation {
HADOOP_SECURITY_AUTHENTICATION +
" of " + value);
}
+ try {
+ kerberosMinSecondsBeforeRelogin = 1000L * conf.getLong(
+ HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN,
+ HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT);
+ }
+ catch(NumberFormatException nfe) {
+ throw new IllegalArgumentException("Invalid attribute value for " +
+ HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN + " of " +
+ conf.get(HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN));
+ }
// If we haven't set up testing groups, use the configuration to find it
if (!(groups instanceof TestingGroups)) {
groups = Groups.getUserToGroupsMappingService(conf);
@@ -343,6 +352,7 @@ public class UserGroupInformation {
this.realUser = realUser;
}
+ @Override
public String getName() {
return realUser.getUserName();
}
@@ -641,14 +651,12 @@ public class UserGroupInformation {
AuthenticationMethod.SIMPLE);
loginUser = new UserGroupInformation(login.getSubject());
String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
- if (fileLocation != null && isSecurityEnabled()) {
+ if (fileLocation != null) {
// load the token storage file and put all of the tokens into the
// user.
Credentials cred = Credentials.readTokenStorageFile(
new Path("file:///" + fileLocation), conf);
- for (Token<?> token: cred.getAllTokens()) {
- loginUser.addToken(token);
- }
+ loginUser.addCredentials(cred);
}
loginUser.spawnAutoRenewalThreadForUserCreds();
} catch (LoginException le) {
@@ -701,6 +709,7 @@ public class UserGroupInformation {
!isKeytab) {
Thread t = new Thread(new Runnable() {
+ @Override
public void run() {
String cmd = conf.get("hadoop.kerberos.kinit.command",
"kinit");
@@ -731,7 +740,7 @@ public class UserGroupInformation {
return;
}
nextRefresh = Math.max(getRefreshTime(tgt),
- now + MIN_TIME_BEFORE_RELOGIN);
+ now + kerberosMinSecondsBeforeRelogin);
} catch (InterruptedException ie) {
LOG.warn("Terminating renewal thread");
return;
@@ -966,10 +975,10 @@ public class UserGroupInformation {
}
private boolean hasSufficientTimeElapsed(long now) {
- if (now - user.getLastLogin() < MIN_TIME_BEFORE_RELOGIN ) {
+ if (now - user.getLastLogin() < kerberosMinSecondsBeforeRelogin ) {
LOG.warn("Not attempting to re-login since the last re-login was " +
- "attempted less than " + (MIN_TIME_BEFORE_RELOGIN/1000) + " seconds"+
- " before.");
+ "attempted less than " + (kerberosMinSecondsBeforeRelogin/1000) +
+ " seconds before.");
return false;
}
return true;
@@ -994,7 +1003,7 @@ public class UserGroupInformation {
@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation createRemoteUser(String user) {
- if (user == null || "".equals(user)) {
+ if (user == null || user.isEmpty()) {
throw new IllegalArgumentException("Null user");
}
Subject subject = new Subject();
@@ -1029,7 +1038,7 @@ public class UserGroupInformation {
@InterfaceStability.Evolving
public static UserGroupInformation createProxyUser(String user,
UserGroupInformation realUser) {
- if (user == null || "".equals(user)) {
+ if (user == null || user.isEmpty()) {
throw new IllegalArgumentException("Null user");
}
if (realUser == null) {
@@ -1185,7 +1194,20 @@ public class UserGroupInformation {
* @return true on successful add of new token
*/
public synchronized boolean addToken(Token<? extends TokenIdentifier> token) {
- return subject.getPrivateCredentials().add(token);
+ return (token != null) ? addToken(token.getService(), token) : false;
+ }
+
+ /**
+ * Add a named token to this UGI
+ *
+ * @param alias Name of the token
+ * @param token Token to be added
+ * @return true on successful add of new token
+ */
+ public synchronized boolean addToken(Text alias,
+ Token<? extends TokenIdentifier> token) {
+ getCredentialsInternal().addToken(alias, token);
+ return true;
}
/**
@@ -1195,14 +1217,38 @@ public class UserGroupInformation {
*/
public synchronized
Collection<Token<? extends TokenIdentifier>> getTokens() {
- Set<Object> creds = subject.getPrivateCredentials();
- List<Token<?>> result = new ArrayList<Token<?>>(creds.size());
- for(Object o: creds) {
- if (o instanceof Token<?>) {
- result.add((Token<?>) o);
- }
+ return Collections.unmodifiableCollection(
+ getCredentialsInternal().getAllTokens());
+ }
+
+ /**
+ * Obtain the tokens in credentials form associated with this user.
+ *
+ * @return Credentials of tokens associated with this user
+ */
+ public synchronized Credentials getCredentials() {
+ return new Credentials(getCredentialsInternal());
+ }
+
+ /**
+ * Add the given Credentials to this user.
+ * @param credentials of tokens and secrets
+ */
+ public synchronized void addCredentials(Credentials credentials) {
+ getCredentialsInternal().addAll(credentials);
+ }
+
+ private synchronized Credentials getCredentialsInternal() {
+ final Credentials credentials;
+ final Set<Credentials> credentialsSet =
+ subject.getPrivateCredentials(Credentials.class);
+ if (!credentialsSet.isEmpty()){
+ credentials = credentialsSet.iterator().next();
+ } else {
+ credentials = new Credentials();
+ subject.getPrivateCredentials().add(credentials);
}
- return Collections.unmodifiableList(result);
+ return credentials;
}
/**
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java Tue Oct 16 00:02:55 2012
@@ -48,6 +48,7 @@ public class AccessControlList implement
WritableFactories.setFactory
(AccessControlList.class,
new WritableFactory() {
+ @Override
public Writable newInstance() { return new AccessControlList(); }
});
}
@@ -318,6 +319,7 @@ public class AccessControlList implement
/**
* Serializes the AccessControlList object
*/
+ @Override
public void write(DataOutput out) throws IOException {
String aclString = getAclString();
Text.writeString(out, aclString);
@@ -326,6 +328,7 @@ public class AccessControlList implement
/**
* Deserializes the AccessControlList object
*/
+ @Override
public void readFields(DataInput in) throws IOException {
String aclString = Text.readString(in);
buildACL(aclString);
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java Tue Oct 16 00:02:55 2012
@@ -42,6 +42,7 @@ public abstract class PolicyProvider {
*/
public static final PolicyProvider DEFAULT_POLICY_PROVIDER =
new PolicyProvider() {
+ @Override
public Service[] getServices() {
return null;
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java Tue Oct 16 00:02:55 2012
@@ -22,7 +22,6 @@ import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.ipc.VersionedProtocol;
import org.apache.hadoop.security.KerberosInfo;
/**
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Tue Oct 16 00:02:55 2012
@@ -88,7 +88,7 @@ public class ServiceAuthorizationManager
String clientPrincipal = null;
if (krbInfo != null) {
String clientKey = krbInfo.clientPrincipal();
- if (clientKey != null && !clientKey.equals("")) {
+ if (clientKey != null && !clientKey.isEmpty()) {
try {
clientPrincipal = SecurityUtil.getServerPrincipal(
conf.get(clientKey), addr);
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java Tue Oct 16 00:02:55 2012
@@ -124,6 +124,7 @@ public class FileBasedKeyStoresFactory i
* @throws GeneralSecurityException thrown if the keystores could not be
* initialized due to a security error.
*/
+ @Override
public void init(SSLFactory.Mode mode)
throws IOException, GeneralSecurityException {
@@ -159,7 +160,7 @@ public class FileBasedKeyStoresFactory i
} finally {
is.close();
}
- LOG.info(mode.toString() + " Loaded KeyStore: " + keystoreLocation);
+ LOG.debug(mode.toString() + " Loaded KeyStore: " + keystoreLocation);
} else {
keystore.load(null, null);
}
@@ -200,7 +201,7 @@ public class FileBasedKeyStoresFactory i
truststorePassword,
truststoreReloadInterval);
trustManager.init();
- LOG.info(mode.toString() + " Loaded TrustStore: " + truststoreLocation);
+ LOG.debug(mode.toString() + " Loaded TrustStore: " + truststoreLocation);
trustManagers = new TrustManager[]{trustManager};
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java Tue Oct 16 00:02:55 2012
@@ -81,6 +81,7 @@ import javax.net.ssl.SSLSocket;
@InterfaceStability.Evolving
public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
+ @Override
boolean verify(String host, SSLSession session);
void check(String host, SSLSocket ssl) throws IOException;
@@ -125,12 +126,14 @@ public interface SSLHostnameVerifier ext
*/
public final static SSLHostnameVerifier DEFAULT =
new AbstractVerifier() {
+ @Override
public final void check(final String[] hosts, final String[] cns,
final String[] subjectAlts)
throws SSLException {
check(hosts, cns, subjectAlts, false, false);
}
+ @Override
public final String toString() { return "DEFAULT"; }
};
@@ -143,6 +146,7 @@ public interface SSLHostnameVerifier ext
*/
public final static SSLHostnameVerifier DEFAULT_AND_LOCALHOST =
new AbstractVerifier() {
+ @Override
public final void check(final String[] hosts, final String[] cns,
final String[] subjectAlts)
throws SSLException {
@@ -152,6 +156,7 @@ public interface SSLHostnameVerifier ext
check(hosts, cns, subjectAlts, false, false);
}
+ @Override
public final String toString() { return "DEFAULT_AND_LOCALHOST"; }
};
@@ -173,12 +178,14 @@ public interface SSLHostnameVerifier ext
*/
public final static SSLHostnameVerifier STRICT =
new AbstractVerifier() {
+ @Override
public final void check(final String[] host, final String[] cns,
final String[] subjectAlts)
throws SSLException {
check(host, cns, subjectAlts, false, true);
}
+ @Override
public final String toString() { return "STRICT"; }
};
@@ -190,12 +197,14 @@ public interface SSLHostnameVerifier ext
*/
public final static SSLHostnameVerifier STRICT_IE6 =
new AbstractVerifier() {
+ @Override
public final void check(final String[] host, final String[] cns,
final String[] subjectAlts)
throws SSLException {
check(host, cns, subjectAlts, true, true);
}
+ @Override
public final String toString() { return "STRICT_IE6"; }
};
@@ -205,11 +214,13 @@ public interface SSLHostnameVerifier ext
*/
public final static SSLHostnameVerifier ALLOW_ALL =
new AbstractVerifier() {
+ @Override
public final void check(final String[] host, final String[] cns,
final String[] subjectAlts) {
// Allow everything - so never blowup.
}
+ @Override
public final String toString() { return "ALLOW_ALL"; }
};
@@ -250,6 +261,7 @@ public interface SSLHostnameVerifier ext
* @param session SSLSession with the remote server
* @return true if the host matched the one in the certificate.
*/
+ @Override
public boolean verify(String host, SSLSession session) {
try {
Certificate[] certs = session.getPeerCertificates();
@@ -262,20 +274,24 @@ public interface SSLHostnameVerifier ext
}
}
+ @Override
public void check(String host, SSLSocket ssl) throws IOException {
check(new String[]{host}, ssl);
}
+ @Override
public void check(String host, X509Certificate cert)
throws SSLException {
check(new String[]{host}, cert);
}
+ @Override
public void check(String host, String[] cns, String[] subjectAlts)
throws SSLException {
check(new String[]{host}, cns, subjectAlts);
}
+ @Override
public void check(String host[], SSLSocket ssl)
throws IOException {
if (host == null) {
@@ -332,6 +348,7 @@ public interface SSLHostnameVerifier ext
check(host, x509);
}
+ @Override
public void check(String[] host, X509Certificate cert)
throws SSLException {
String[] cns = Certificates.getCNs(cert);
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java Tue Oct 16 00:02:55 2012
@@ -93,7 +93,7 @@ public abstract class SecretManager<T ex
/**
* The length of the random keys to use.
*/
- private static final int KEY_LENGTH = 20;
+ private static final int KEY_LENGTH = 64;
/**
* A thread local store for the Macs.
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java Tue Oct 16 00:02:55 2012
@@ -195,7 +195,7 @@ public class Token<T extends TokenIdenti
service = newService;
}
- /** {@inheritDoc} */
+ @Override
public void readFields(DataInput in) throws IOException {
int len = WritableUtils.readVInt(in);
if (identifier == null || identifier.length != len) {
@@ -211,7 +211,7 @@ public class Token<T extends TokenIdenti
service.readFields(in);
}
- /** {@inheritDoc} */
+ @Override
public void write(DataOutput out) throws IOException {
WritableUtils.writeVInt(out, identifier.length);
out.write(identifier);
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java Tue Oct 16 00:02:55 2012
@@ -85,13 +85,14 @@ extends TokenIdentifier {
*
* @return the username or owner
*/
+ @Override
public UserGroupInformation getUser() {
- if ( (owner == null) || ("".equals(owner.toString()))) {
+ if ( (owner == null) || (owner.toString().isEmpty())) {
return null;
}
final UserGroupInformation realUgi;
final UserGroupInformation ugi;
- if ((realUser == null) || ("".equals(realUser.toString()))
+ if ((realUser == null) || (realUser.toString().isEmpty())
|| realUser.equals(owner)) {
ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
} else {
@@ -150,7 +151,7 @@ extends TokenIdentifier {
return a == null ? b == null : a.equals(b);
}
- /** {@inheritDoc} */
+ @Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
@@ -168,11 +169,12 @@ extends TokenIdentifier {
return false;
}
- /** {@inheritDoc} */
+ @Override
public int hashCode() {
return this.sequenceNumber;
}
+ @Override
public void readFields(DataInput in) throws IOException {
byte version = in.readByte();
if (version != VERSION) {
@@ -200,6 +202,7 @@ extends TokenIdentifier {
WritableUtils.writeVInt(out, masterKeyId);
}
+ @Override
public void write(DataOutput out) throws IOException {
if (owner.getLength() > Text.DEFAULT_MAX_LEN) {
throw new IOException("owner is too long to be serialized!");
@@ -213,6 +216,7 @@ extends TokenIdentifier {
writeImpl(out);
}
+ @Override
public String toString() {
StringBuilder buffer = new StringBuilder();
buffer
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java Tue Oct 16 00:02:55 2012
@@ -265,7 +265,7 @@ extends AbstractDelegationTokenIdentifie
throw new InvalidToken("User " + renewer +
" tried to renew an expired token");
}
- if ((id.getRenewer() == null) || ("".equals(id.getRenewer().toString()))) {
+ if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
throw new AccessControlException("User " + renewer +
" tried to renew a token without " +
"a renewer");
@@ -321,7 +321,7 @@ extends AbstractDelegationTokenIdentifie
HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
String cancelerShortName = cancelerKrbName.getShortName();
if (!canceller.equals(owner)
- && (renewer == null || "".equals(renewer.toString()) || !cancelerShortName
+ && (renewer == null || renewer.toString().isEmpty() || !cancelerShortName
.equals(renewer.toString()))) {
throw new AccessControlException(canceller
+ " is not authorized to cancel the token");
@@ -404,6 +404,7 @@ extends AbstractDelegationTokenIdentifie
private long lastMasterKeyUpdate;
private long lastTokenCacheCleanup;
+ @Override
public void run() {
LOG.info("Starting expired delegation token remover thread, "
+ "tokenRemoverScanInterval=" + tokenRemoverScanInterval
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java Tue Oct 16 00:02:55 2012
@@ -91,6 +91,7 @@ public class DelegationKey implements Wr
/**
*/
+ @Override
public void write(DataOutput out) throws IOException {
WritableUtils.writeVInt(out, keyId);
WritableUtils.writeVLong(out, expiryDate);
@@ -104,6 +105,7 @@ public class DelegationKey implements Wr
/**
*/
+ @Override
public void readFields(DataInput in) throws IOException {
keyId = WritableUtils.readVInt(in);
expiryDate = WritableUtils.readVLong(in);
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java Tue Oct 16 00:02:55 2012
@@ -21,7 +21,6 @@ import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.ipc.VersionedProtocol;
/**
* Protocol implemented by the Name Node and Job Tracker which maps users to
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java Tue Oct 16 00:02:55 2012
@@ -71,6 +71,7 @@ public class AsyncDiskService {
public AsyncDiskService(String[] volumes) throws IOException {
threadFactory = new ThreadFactory() {
+ @Override
public Thread newThread(Runnable r) {
return new Thread(threadGroup, r);
}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java Tue Oct 16 00:02:55 2012
@@ -1,452 +1,460 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.util;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.zip.Checksum;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.ChecksumException;
-
-/**
- * This class provides inteface and utilities for processing checksums for
- * DFS data transfers.
- */
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-@InterfaceStability.Evolving
-public class DataChecksum implements Checksum {
-
- // Misc constants
- public static final int HEADER_LEN = 5; /// 1 byte type and 4 byte len
-
- // checksum types
- public static final int CHECKSUM_NULL = 0;
- public static final int CHECKSUM_CRC32 = 1;
- public static final int CHECKSUM_CRC32C = 2;
- public static final int CHECKSUM_DEFAULT = 3;
- public static final int CHECKSUM_MIXED = 4;
-
- /** The checksum types */
- public static enum Type {
- NULL (CHECKSUM_NULL, 0),
- CRC32 (CHECKSUM_CRC32, 4),
- CRC32C(CHECKSUM_CRC32C, 4),
- DEFAULT(CHECKSUM_DEFAULT, 0), // This cannot be used to create DataChecksum
- MIXED (CHECKSUM_MIXED, 0); // This cannot be used to create DataChecksum
-
- public final int id;
- public final int size;
-
- private Type(int id, int size) {
- this.id = id;
- this.size = size;
- }
-
- /** @return the type corresponding to the id. */
- public static Type valueOf(int id) {
- if (id < 0 || id >= values().length) {
- throw new IllegalArgumentException("id=" + id
- + " out of range [0, " + values().length + ")");
- }
- return values()[id];
- }
- }
-
-
- public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum ) {
- if ( bytesPerChecksum <= 0 ) {
- return null;
- }
-
- switch ( type ) {
- case NULL :
- return new DataChecksum(type, new ChecksumNull(), bytesPerChecksum );
- case CRC32 :
- return new DataChecksum(type, new PureJavaCrc32(), bytesPerChecksum );
- case CRC32C:
- return new DataChecksum(type, new PureJavaCrc32C(), bytesPerChecksum);
- default:
- return null;
- }
- }
-
- /**
- * Creates a DataChecksum from HEADER_LEN bytes from arr[offset].
- * @return DataChecksum of the type in the array or null in case of an error.
- */
- public static DataChecksum newDataChecksum( byte bytes[], int offset ) {
- if ( offset < 0 || bytes.length < offset + HEADER_LEN ) {
- return null;
- }
-
- // like readInt():
- int bytesPerChecksum = ( (bytes[offset+1] & 0xff) << 24 ) |
- ( (bytes[offset+2] & 0xff) << 16 ) |
- ( (bytes[offset+3] & 0xff) << 8 ) |
- ( (bytes[offset+4] & 0xff) );
- return newDataChecksum( Type.valueOf(bytes[0]), bytesPerChecksum );
- }
-
- /**
- * This constructucts a DataChecksum by reading HEADER_LEN bytes from
- * input stream <i>in</i>
- */
- public static DataChecksum newDataChecksum( DataInputStream in )
- throws IOException {
- int type = in.readByte();
- int bpc = in.readInt();
- DataChecksum summer = newDataChecksum(Type.valueOf(type), bpc );
- if ( summer == null ) {
- throw new IOException( "Could not create DataChecksum of type " +
- type + " with bytesPerChecksum " + bpc );
- }
- return summer;
- }
-
- /**
- * Writes the checksum header to the output stream <i>out</i>.
- */
- public void writeHeader( DataOutputStream out )
- throws IOException {
- out.writeByte( type.id );
- out.writeInt( bytesPerChecksum );
- }
-
- public byte[] getHeader() {
- byte[] header = new byte[DataChecksum.HEADER_LEN];
- header[0] = (byte) (type.id & 0xff);
- // Writing in buffer just like DataOutput.WriteInt()
- header[1+0] = (byte) ((bytesPerChecksum >>> 24) & 0xff);
- header[1+1] = (byte) ((bytesPerChecksum >>> 16) & 0xff);
- header[1+2] = (byte) ((bytesPerChecksum >>> 8) & 0xff);
- header[1+3] = (byte) (bytesPerChecksum & 0xff);
- return header;
- }
-
- /**
- * Writes the current checksum to the stream.
- * If <i>reset</i> is true, then resets the checksum.
- * @return number of bytes written. Will be equal to getChecksumSize();
- */
- public int writeValue( DataOutputStream out, boolean reset )
- throws IOException {
- if ( type.size <= 0 ) {
- return 0;
- }
-
- if ( type.size == 4 ) {
- out.writeInt( (int) summer.getValue() );
- } else {
- throw new IOException( "Unknown Checksum " + type );
- }
-
- if ( reset ) {
- reset();
- }
-
- return type.size;
- }
-
- /**
- * Writes the current checksum to a buffer.
- * If <i>reset</i> is true, then resets the checksum.
- * @return number of bytes written. Will be equal to getChecksumSize();
- */
- public int writeValue( byte[] buf, int offset, boolean reset )
- throws IOException {
- if ( type.size <= 0 ) {
- return 0;
- }
-
- if ( type.size == 4 ) {
- int checksum = (int) summer.getValue();
- buf[offset+0] = (byte) ((checksum >>> 24) & 0xff);
- buf[offset+1] = (byte) ((checksum >>> 16) & 0xff);
- buf[offset+2] = (byte) ((checksum >>> 8) & 0xff);
- buf[offset+3] = (byte) (checksum & 0xff);
- } else {
- throw new IOException( "Unknown Checksum " + type );
- }
-
- if ( reset ) {
- reset();
- }
-
- return type.size;
- }
-
- /**
- * Compares the checksum located at buf[offset] with the current checksum.
- * @return true if the checksum matches and false otherwise.
- */
- public boolean compare( byte buf[], int offset ) {
- if ( type.size == 4 ) {
- int checksum = ( (buf[offset+0] & 0xff) << 24 ) |
- ( (buf[offset+1] & 0xff) << 16 ) |
- ( (buf[offset+2] & 0xff) << 8 ) |
- ( (buf[offset+3] & 0xff) );
- return checksum == (int) summer.getValue();
- }
- return type.size == 0;
- }
-
- private final Type type;
- private final Checksum summer;
- private final int bytesPerChecksum;
- private int inSum = 0;
-
- private DataChecksum( Type type, Checksum checksum, int chunkSize ) {
- this.type = type;
- summer = checksum;
- bytesPerChecksum = chunkSize;
- }
-
- // Accessors
- public Type getChecksumType() {
- return type;
- }
- public int getChecksumSize() {
- return type.size;
- }
- public int getBytesPerChecksum() {
- return bytesPerChecksum;
- }
- public int getNumBytesInSum() {
- return inSum;
- }
-
- public static final int SIZE_OF_INTEGER = Integer.SIZE / Byte.SIZE;
- static public int getChecksumHeaderSize() {
- return 1 + SIZE_OF_INTEGER; // type byte, bytesPerChecksum int
- }
- //Checksum Interface. Just a wrapper around member summer.
- public long getValue() {
- return summer.getValue();
- }
- public void reset() {
- summer.reset();
- inSum = 0;
- }
- public void update( byte[] b, int off, int len ) {
- if ( len > 0 ) {
- summer.update( b, off, len );
- inSum += len;
- }
- }
- public void update( int b ) {
- summer.update( b );
- inSum += 1;
- }
-
- /**
- * Verify that the given checksums match the given data.
- *
- * The 'mark' of the ByteBuffer parameters may be modified by this function,.
- * but the position is maintained.
- *
- * @param data the DirectByteBuffer pointing to the data to verify.
- * @param checksums the DirectByteBuffer pointing to a series of stored
- * checksums
- * @param fileName the name of the file being read, for error-reporting
- * @param basePos the file position to which the start of 'data' corresponds
- * @throws ChecksumException if the checksums do not match
- */
- public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums,
- String fileName, long basePos)
- throws ChecksumException {
- if (type.size == 0) return;
-
- if (data.hasArray() && checksums.hasArray()) {
- verifyChunkedSums(
- data.array(), data.arrayOffset() + data.position(), data.remaining(),
- checksums.array(), checksums.arrayOffset() + checksums.position(),
- fileName, basePos);
- return;
- }
- if (NativeCrc32.isAvailable()) {
- NativeCrc32.verifyChunkedSums(bytesPerChecksum, type.id, checksums, data,
- fileName, basePos);
- return;
- }
-
- int startDataPos = data.position();
- data.mark();
- checksums.mark();
- try {
- byte[] buf = new byte[bytesPerChecksum];
- byte[] sum = new byte[type.size];
- while (data.remaining() > 0) {
- int n = Math.min(data.remaining(), bytesPerChecksum);
- checksums.get(sum);
- data.get(buf, 0, n);
- summer.reset();
- summer.update(buf, 0, n);
- int calculated = (int)summer.getValue();
- int stored = (sum[0] << 24 & 0xff000000) |
- (sum[1] << 16 & 0xff0000) |
- (sum[2] << 8 & 0xff00) |
- sum[3] & 0xff;
- if (calculated != stored) {
- long errPos = basePos + data.position() - startDataPos - n;
- throw new ChecksumException(
- "Checksum error: "+ fileName + " at "+ errPos +
- " exp: " + stored + " got: " + calculated, errPos);
- }
- }
- } finally {
- data.reset();
- checksums.reset();
- }
- }
-
- /**
- * Implementation of chunked verification specifically on byte arrays. This
- * is to avoid the copy when dealing with ByteBuffers that have array backing.
- */
- private void verifyChunkedSums(
- byte[] data, int dataOff, int dataLen,
- byte[] checksums, int checksumsOff, String fileName,
- long basePos) throws ChecksumException {
-
- int remaining = dataLen;
- int dataPos = 0;
- while (remaining > 0) {
- int n = Math.min(remaining, bytesPerChecksum);
-
- summer.reset();
- summer.update(data, dataOff + dataPos, n);
- dataPos += n;
- remaining -= n;
-
- int calculated = (int)summer.getValue();
- int stored = (checksums[checksumsOff] << 24 & 0xff000000) |
- (checksums[checksumsOff + 1] << 16 & 0xff0000) |
- (checksums[checksumsOff + 2] << 8 & 0xff00) |
- checksums[checksumsOff + 3] & 0xff;
- checksumsOff += 4;
- if (calculated != stored) {
- long errPos = basePos + dataPos - n;
- throw new ChecksumException(
- "Checksum error: "+ fileName + " at "+ errPos +
- " exp: " + stored + " got: " + calculated, errPos);
- }
- }
- }
-
- /**
- * Calculate checksums for the given data.
- *
- * The 'mark' of the ByteBuffer parameters may be modified by this function,
- * but the position is maintained.
- *
- * @param data the DirectByteBuffer pointing to the data to checksum.
- * @param checksums the DirectByteBuffer into which checksums will be
- * stored. Enough space must be available in this
- * buffer to put the checksums.
- */
- public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) {
- if (type.size == 0) return;
-
- if (data.hasArray() && checksums.hasArray()) {
- calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), data.remaining(),
- checksums.array(), checksums.arrayOffset() + checksums.position());
- return;
- }
-
- data.mark();
- checksums.mark();
- try {
- byte[] buf = new byte[bytesPerChecksum];
- while (data.remaining() > 0) {
- int n = Math.min(data.remaining(), bytesPerChecksum);
- data.get(buf, 0, n);
- summer.reset();
- summer.update(buf, 0, n);
- checksums.putInt((int)summer.getValue());
- }
- } finally {
- data.reset();
- checksums.reset();
- }
- }
-
- /**
- * Implementation of chunked calculation specifically on byte arrays. This
- * is to avoid the copy when dealing with ByteBuffers that have array backing.
- */
- private void calculateChunkedSums(
- byte[] data, int dataOffset, int dataLength,
- byte[] sums, int sumsOffset) {
-
- int remaining = dataLength;
- while (remaining > 0) {
- int n = Math.min(remaining, bytesPerChecksum);
- summer.reset();
- summer.update(data, dataOffset, n);
- dataOffset += n;
- remaining -= n;
- long calculated = summer.getValue();
- sums[sumsOffset++] = (byte) (calculated >> 24);
- sums[sumsOffset++] = (byte) (calculated >> 16);
- sums[sumsOffset++] = (byte) (calculated >> 8);
- sums[sumsOffset++] = (byte) (calculated);
- }
- }
-
- @Override
- public boolean equals(Object other) {
- if (!(other instanceof DataChecksum)) {
- return false;
- }
- DataChecksum o = (DataChecksum)other;
- return o.bytesPerChecksum == this.bytesPerChecksum &&
- o.type == this.type;
- }
-
- @Override
- public int hashCode() {
- return (this.type.id + 31) * this.bytesPerChecksum;
- }
-
- @Override
- public String toString() {
- return "DataChecksum(type=" + type +
- ", chunkSize=" + bytesPerChecksum + ")";
- }
-
- /**
- * This just provides a dummy implimentation for Checksum class
- * This is used when there is no checksum available or required for
- * data
- */
- static class ChecksumNull implements Checksum {
-
- public ChecksumNull() {}
-
- //Dummy interface
- public long getValue() { return 0; }
- public void reset() {}
- public void update(byte[] b, int off, int len) {}
- public void update(int b) {}
- };
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.zip.Checksum;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.ChecksumException;
+
+/**
+ * This class provides interface and utilities for processing checksums for
+ * DFS data transfers.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class DataChecksum implements Checksum {
+
+ // Misc constants
+ public static final int HEADER_LEN = 5; /// 1 byte type and 4 byte len
+
+ // checksum types
+ public static final int CHECKSUM_NULL = 0;
+ public static final int CHECKSUM_CRC32 = 1;
+ public static final int CHECKSUM_CRC32C = 2;
+ public static final int CHECKSUM_DEFAULT = 3;
+ public static final int CHECKSUM_MIXED = 4;
+
+ /** The checksum types */
+ public static enum Type {
+ NULL (CHECKSUM_NULL, 0),
+ CRC32 (CHECKSUM_CRC32, 4),
+ CRC32C(CHECKSUM_CRC32C, 4),
+ DEFAULT(CHECKSUM_DEFAULT, 0), // This cannot be used to create DataChecksum
+ MIXED (CHECKSUM_MIXED, 0); // This cannot be used to create DataChecksum
+
+ public final int id;
+ public final int size;
+
+ private Type(int id, int size) {
+ this.id = id;
+ this.size = size;
+ }
+
+ /** @return the type corresponding to the id. */
+ public static Type valueOf(int id) {
+ if (id < 0 || id >= values().length) {
+ throw new IllegalArgumentException("id=" + id
+ + " out of range [0, " + values().length + ")");
+ }
+ return values()[id];
+ }
+ }
+
+
+  /**
+   * Create a DataChecksum of the given type and chunk size.
+   * @return a new DataChecksum, or null for a non-positive chunk size or a
+   *         type with no concrete implementation (DEFAULT, MIXED).
+   */
+  public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum) {
+    if (bytesPerChecksum <= 0) {
+      return null;
+    }
+    final Checksum summer;
+    if (type == Type.NULL) {
+      summer = new ChecksumNull();
+    } else if (type == Type.CRC32) {
+      summer = new PureJavaCrc32();
+    } else if (type == Type.CRC32C) {
+      summer = new PureJavaCrc32C();
+    } else {
+      // DEFAULT and MIXED have no concrete checksum implementation.
+      return null;
+    }
+    return new DataChecksum(type, summer, bytesPerChecksum);
+  }
+
+  /**
+   * Creates a DataChecksum from HEADER_LEN bytes from arr[offset].
+   *
+   * Header layout (matching {@link #getHeader()}): one type-id byte followed
+   * by a big-endian 4-byte bytesPerChecksum, i.e. the same encoding that
+   * DataOutput.writeInt() produces.
+   *
+   * @return DataChecksum of the type in the array or null in case of an error.
+   */
+  public static DataChecksum newDataChecksum( byte bytes[], int offset ) {
+    if ( offset < 0 || bytes.length < offset + HEADER_LEN ) {
+      return null;
+    }
+
+    // like readInt():
+    int bytesPerChecksum = ( (bytes[offset+1] & 0xff) << 24 ) |
+                           ( (bytes[offset+2] & 0xff) << 16 ) |
+                           ( (bytes[offset+3] & 0xff) << 8 )  |
+                           ( (bytes[offset+4] & 0xff) );
+    // BUG FIX: the type byte lives at bytes[offset], not bytes[0]. The old
+    // code silently decoded the wrong type whenever offset != 0.
+    return newDataChecksum( Type.valueOf(bytes[offset]), bytesPerChecksum );
+  }
+
+  /**
+   * This constructs a DataChecksum by reading HEADER_LEN bytes from
+   * input stream <i>in</i>.
+   * @throws IOException if the header does not describe a usable checksum
+   */
+  public static DataChecksum newDataChecksum( DataInputStream in )
+      throws IOException {
+    final int typeId = in.readByte();
+    final int bpc = in.readInt();
+    final DataChecksum checksum = newDataChecksum(Type.valueOf(typeId), bpc);
+    if (checksum != null) {
+      return checksum;
+    }
+    throw new IOException( "Could not create DataChecksum of type " +
+        typeId + " with bytesPerChecksum " + bpc );
+  }
+
+ /**
+ * Writes the checksum header to the output stream <i>out</i>.
+ * Layout: 1 type-id byte followed by a big-endian 4-byte bytesPerChecksum
+ * (HEADER_LEN bytes total, the same bytes getHeader() returns).
+ */
+ public void writeHeader( DataOutputStream out )
+ throws IOException {
+ out.writeByte( type.id );
+ out.writeInt( bytesPerChecksum );
+ }
+
+  /**
+   * @return the HEADER_LEN-byte header: the type id followed by the chunk
+   *         size encoded big-endian, exactly as writeHeader() emits it.
+   */
+  public byte[] getHeader() {
+    final byte[] header = new byte[DataChecksum.HEADER_LEN];
+    header[0] = (byte) (type.id & 0xff);
+    // Big-endian int, same byte order as DataOutput.writeInt().
+    for (int i = 0; i < 4; i++) {
+      header[1 + i] = (byte) ((bytesPerChecksum >>> (24 - 8 * i)) & 0xff);
+    }
+    return header;
+  }
+
+  /**
+   * Writes the current checksum to the stream.
+   * If <i>reset</i> is true, then resets the checksum.
+   * @return number of bytes written. Will be equal to getChecksumSize();
+   * @throws IOException if the checksum type has an unexpected size
+   */
+  public int writeValue( DataOutputStream out, boolean reset )
+      throws IOException {
+    final int size = type.size;
+    if (size <= 0) {
+      // NULL-style checksums store nothing.
+      return 0;
+    }
+    if (size != 4) {
+      throw new IOException( "Unknown Checksum " + type );
+    }
+    out.writeInt( (int) summer.getValue() );
+    if (reset) {
+      reset();
+    }
+    return size;
+  }
+
+  /**
+   * Writes the current checksum to a buffer.
+   * If <i>reset</i> is true, then resets the checksum.
+   * @return number of bytes written. Will be equal to getChecksumSize();
+   * @throws IOException if the checksum type has an unexpected size
+   */
+  public int writeValue( byte[] buf, int offset, boolean reset )
+      throws IOException {
+    final int size = type.size;
+    if (size <= 0) {
+      // NULL-style checksums store nothing.
+      return 0;
+    }
+    if (size != 4) {
+      throw new IOException( "Unknown Checksum " + type );
+    }
+    final int checksum = (int) summer.getValue();
+    // Store big-endian, matching DataOutput.writeInt().
+    buf[offset]     = (byte) (checksum >>> 24);
+    buf[offset + 1] = (byte) (checksum >>> 16);
+    buf[offset + 2] = (byte) (checksum >>> 8);
+    buf[offset + 3] = (byte) checksum;
+    if (reset) {
+      reset();
+    }
+    return size;
+  }
+
+  /**
+   * Compares the checksum located at buf[offset] with the current checksum.
+   * @return true if the checksum matches and false otherwise.
+   */
+  public boolean compare( byte buf[], int offset ) {
+    if (type.size != 4) {
+      // Types without a stored value (size 0) trivially match; any other
+      // unexpected size never matches.
+      return type.size == 0;
+    }
+    // Reassemble the stored big-endian checksum from its 4 bytes.
+    int stored = 0;
+    for (int i = 0; i < 4; i++) {
+      stored = (stored << 8) | (buf[offset + i] & 0xff);
+    }
+    return stored == (int) summer.getValue();
+  }
+
+ // The algorithm identifier for this checksum.
+ private final Type type;
+ // Underlying Checksum implementation that does the actual work.
+ private final Checksum summer;
+ // Number of data bytes covered by each stored checksum value.
+ private final int bytesPerChecksum;
+ // Bytes folded into the current (unfinished) chunk; cleared by reset().
+ private int inSum = 0;
+
+ // Private: instances are obtained via the newDataChecksum() factories,
+ // which validate the type/chunk-size combination first.
+ private DataChecksum( Type type, Checksum checksum, int chunkSize ) {
+ this.type = type;
+ summer = checksum;
+ bytesPerChecksum = chunkSize;
+ }
+
+ // Accessors
+ /** @return the checksum algorithm in use. */
+ public Type getChecksumType() {
+ return type;
+ }
+ /** @return the number of bytes a single stored checksum occupies. */
+ public int getChecksumSize() {
+ return type.size;
+ }
+ /** @return the number of data bytes covered by each checksum. */
+ public int getBytesPerChecksum() {
+ return bytesPerChecksum;
+ }
+ /** @return bytes accumulated into the current chunk since the last reset. */
+ public int getNumBytesInSum() {
+ return inSum;
+ }
+
+ // Width of a Java int in bytes (4).
+ public static final int SIZE_OF_INTEGER = Integer.SIZE / Byte.SIZE;
+ /** @return on-wire header length: 1 type byte + 4-byte bytesPerChecksum. */
+ static public int getChecksumHeaderSize() {
+ return 1 + SIZE_OF_INTEGER; // type byte, bytesPerChecksum int
+ }
+ //Checksum Interface. Just a wrapper around member summer.
+ @Override
+ public long getValue() {
+ return summer.getValue();
+ }
+ // Also clears inSum, the count of bytes in the current chunk.
+ @Override
+ public void reset() {
+ summer.reset();
+ inSum = 0;
+ }
+ // Zero/negative lengths are ignored so inSum only grows on real data.
+ @Override
+ public void update( byte[] b, int off, int len ) {
+ if ( len > 0 ) {
+ summer.update( b, off, len );
+ inSum += len;
+ }
+ }
+ @Override
+ public void update( int b ) {
+ summer.update( b );
+ inSum += 1;
+ }
+
+ /**
+ * Verify that the given checksums match the given data.
+ *
+ * The 'mark' of the ByteBuffer parameters may be modified by this function,
+ * but the position is maintained.
+ *
+ * @param data the DirectByteBuffer pointing to the data to verify.
+ * @param checksums the DirectByteBuffer pointing to a series of stored
+ * checksums
+ * @param fileName the name of the file being read, for error-reporting
+ * @param basePos the file position to which the start of 'data' corresponds
+ * @throws ChecksumException if the checksums do not match
+ */
+ public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums,
+ String fileName, long basePos)
+ throws ChecksumException {
+ // Nothing to verify for types with no stored value (e.g. NULL).
+ if (type.size == 0) return;
+
+ // Fast path 1: both buffers are array-backed -> verify without copying.
+ if (data.hasArray() && checksums.hasArray()) {
+ verifyChunkedSums(
+ data.array(), data.arrayOffset() + data.position(), data.remaining(),
+ checksums.array(), checksums.arrayOffset() + checksums.position(),
+ fileName, basePos);
+ return;
+ }
+ // Fast path 2: hand the buffers to the native CRC32 implementation.
+ if (NativeCrc32.isAvailable()) {
+ NativeCrc32.verifyChunkedSums(bytesPerChecksum, type.id, checksums, data,
+ fileName, basePos);
+ return;
+ }
+
+ // Generic path: copy chunk-by-chunk through scratch arrays.
+ int startDataPos = data.position();
+ data.mark();
+ checksums.mark();
+ try {
+ byte[] buf = new byte[bytesPerChecksum];
+ byte[] sum = new byte[type.size];
+ while (data.remaining() > 0) {
+ // The final chunk may be shorter than bytesPerChecksum.
+ int n = Math.min(data.remaining(), bytesPerChecksum);
+ checksums.get(sum);
+ data.get(buf, 0, n);
+ summer.reset();
+ summer.update(buf, 0, n);
+ int calculated = (int)summer.getValue();
+ // Reassemble the stored big-endian checksum from its 4 bytes.
+ int stored = (sum[0] << 24 & 0xff000000) |
+ (sum[1] << 16 & 0xff0000) |
+ (sum[2] << 8 & 0xff00) |
+ sum[3] & 0xff;
+ if (calculated != stored) {
+ // File offset of the start of the failing chunk.
+ long errPos = basePos + data.position() - startDataPos - n;
+ throw new ChecksumException(
+ "Checksum error: "+ fileName + " at "+ errPos +
+ " exp: " + stored + " got: " + calculated, errPos);
+ }
+ }
+ } finally {
+ // Restore the caller's positions (paired with mark() above).
+ data.reset();
+ checksums.reset();
+ }
+ }
+
+ /**
+ * Implementation of chunked verification specifically on byte arrays. This
+ * is to avoid the copy when dealing with ByteBuffers that have array backing.
+ */
+ private void verifyChunkedSums(
+ byte[] data, int dataOff, int dataLen,
+ byte[] checksums, int checksumsOff, String fileName,
+ long basePos) throws ChecksumException {
+
+ int remaining = dataLen;
+ int dataPos = 0;
+ while (remaining > 0) {
+ int n = Math.min(remaining, bytesPerChecksum);
+
+ summer.reset();
+ summer.update(data, dataOff + dataPos, n);
+ dataPos += n;
+ remaining -= n;
+
+ int calculated = (int)summer.getValue();
+ int stored = (checksums[checksumsOff] << 24 & 0xff000000) |
+ (checksums[checksumsOff + 1] << 16 & 0xff0000) |
+ (checksums[checksumsOff + 2] << 8 & 0xff00) |
+ checksums[checksumsOff + 3] & 0xff;
+ checksumsOff += 4;
+ if (calculated != stored) {
+ long errPos = basePos + dataPos - n;
+ throw new ChecksumException(
+ "Checksum error: "+ fileName + " at "+ errPos +
+ " exp: " + stored + " got: " + calculated, errPos);
+ }
+ }
+ }
+
+  /**
+   * Calculate checksums for the given data.
+   *
+   * The 'mark' of the ByteBuffer parameters may be modified by this function,
+   * but the position is maintained.
+   *
+   * @param data the DirectByteBuffer pointing to the data to checksum.
+   * @param checksums the DirectByteBuffer into which checksums will be
+   *                  stored. Enough space must be available in this
+   *                  buffer to put the checksums.
+   */
+  public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) {
+    if (type.size == 0) {
+      // Nothing to store for checksum types with no value (e.g. NULL).
+      return;
+    }
+
+    if (data.hasArray() && checksums.hasArray()) {
+      // Array-backed buffers: checksum in place without copying.
+      calculateChunkedSums(
+          data.array(), data.arrayOffset() + data.position(), data.remaining(),
+          checksums.array(), checksums.arrayOffset() + checksums.position());
+      return;
+    }
+
+    // Direct buffers: copy one chunk at a time through a scratch array.
+    data.mark();
+    checksums.mark();
+    try {
+      final byte[] chunk = new byte[bytesPerChecksum];
+      while (data.hasRemaining()) {
+        final int n = Math.min(data.remaining(), bytesPerChecksum);
+        data.get(chunk, 0, n);
+        summer.reset();
+        summer.update(chunk, 0, n);
+        checksums.putInt((int) summer.getValue());
+      }
+    } finally {
+      // Restore the positions the caller handed us.
+      data.reset();
+      checksums.reset();
+    }
+  }
+
+ /**
+ * Implementation of chunked calculation specifically on byte arrays. This
+ * is to avoid the copy when dealing with ByteBuffers that have array backing.
+ */
+ private void calculateChunkedSums(
+ byte[] data, int dataOffset, int dataLength,
+ byte[] sums, int sumsOffset) {
+
+ int remaining = dataLength;
+ while (remaining > 0) {
+ int n = Math.min(remaining, bytesPerChecksum);
+ summer.reset();
+ summer.update(data, dataOffset, n);
+ dataOffset += n;
+ remaining -= n;
+ long calculated = summer.getValue();
+ sums[sumsOffset++] = (byte) (calculated >> 24);
+ sums[sumsOffset++] = (byte) (calculated >> 16);
+ sums[sumsOffset++] = (byte) (calculated >> 8);
+ sums[sumsOffset++] = (byte) (calculated);
+ }
+ }
+
+  // Two DataChecksums are equal iff they use the same algorithm and the
+  // same chunk size; the running state (inSum, summer) is ignored.
+  @Override
+  public boolean equals(Object other) {
+    if (other instanceof DataChecksum) {
+      final DataChecksum that = (DataChecksum) other;
+      return this.bytesPerChecksum == that.bytesPerChecksum
+          && this.type == that.type;
+    }
+    return false;
+  }
+
+ @Override
+ public int hashCode() {
+ // Consistent with equals(): derived only from type and chunk size.
+ return (this.type.id + 31) * this.bytesPerChecksum;
+ }
+
+ @Override
+ public String toString() {
+ // e.g. "DataChecksum(type=CRC32, chunkSize=512)"
+ return "DataChecksum(type=" + type +
+ ", chunkSize=" + bytesPerChecksum + ")";
+ }
+
+ /**
+ * This just provides a dummy implementation for the Checksum class.
+ * This is used when there is no checksum available or required for
+ * data
+ */
+ static class ChecksumNull implements Checksum {
+
+ public ChecksumNull() {}
+
+ //Dummy interface: every operation is a no-op and the value is always 0.
+ @Override
+ public long getValue() { return 0; }
+ @Override
+ public void reset() {}
+ @Override
+ public void update(byte[] b, int off, int len) {}
+ @Override
+ public void update(int b) {}
+ };
+}
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java Tue Oct 16 00:02:55 2012
@@ -101,7 +101,7 @@ public final class ExitUtil {
* @throws ExitException if System.exit is disabled for test purposes
*/
public static void terminate(int status, Throwable t) throws ExitException {
- terminate(status, t.getMessage());
+ terminate(status, StringUtils.stringifyException(t));
}
/**
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java Tue Oct 16 00:02:55 2012
@@ -48,13 +48,12 @@ public final class HeapSort implements I
* Sort the given range of items using heap sort.
* {@inheritDoc}
*/
+ @Override
public void sort(IndexedSortable s, int p, int r) {
sort(s, p, r, null);
}
- /**
- * {@inheritDoc}
- */
+ @Override
public void sort(final IndexedSortable s, final int p, final int r,
final Progressable rep) {
final int N = r - p;