You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ki...@apache.org on 2020/01/24 17:08:47 UTC
[hadoop] 01/02: HADOOP-16580. Disable retry of
FailoverOnNetworkExceptionRetry in case of AccessControlException.
Contributed by Adam Antal
This is an automated email from the ASF dual-hosted git repository.
kihwal pushed a commit to branch branch-2.10
in repository https://gitbox.apache.org/repos/asf/hadoop.git
commit 2873a705f4b06b80e0685b6593048d55a51d15c1
Author: Kihwal Lee <ki...@apache.org>
AuthorDate: Fri Jan 24 11:07:06 2020 -0600
HADOOP-16580. Disable retry of FailoverOnNetworkExceptionRetry in case of AccessControlException. Contributed by Adam Antal
(cherry picked from commit c79a5f2d9930f58ad95864c59cd0a6164cd53280)
---
.../org/apache/hadoop/io/retry/RetryPolicies.java | 4 +++
.../org/apache/hadoop/io/retry/TestRetryProxy.java | 29 ++++++++++++++++++++++
.../hadoop/io/retry/UnreliableImplementation.java | 16 ++++++++++++
.../hadoop/io/retry/UnreliableInterface.java | 17 +++++++++++++
4 files changed, 66 insertions(+)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
index 47efbbc..a89c3a7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.RetriableException;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.net.ConnectTimeoutException;
+import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.ietf.jgss.GSSException;
@@ -689,6 +690,9 @@ public class RetryPolicies {
} else if (e instanceof InvalidToken) {
return new RetryAction(RetryAction.RetryDecision.FAIL, 0,
"Invalid or Cancelled Token");
+ } else if (e instanceof AccessControlException) {
+ return new RetryAction(RetryAction.RetryDecision.FAIL, 0,
+ "Access denied");
} else if (e instanceof SocketException
|| (e instanceof IOException && !(e instanceof RemoteException))) {
if (isIdempotentOrAtMostOnce) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
index 1accb0a0..2116fb2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.io.retry.UnreliableInterface.FatalException;
import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException;
import org.apache.hadoop.ipc.ProtocolTranslator;
import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.AccessControlException;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
@@ -48,6 +49,14 @@ import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.*;
+/**
+ * TestRetryProxy tests the behaviour of the {@link RetryPolicy} class using
+ * a certain method of {@link UnreliableInterface} implemented by
+ * {@link UnreliableImplementation}.
+ *
+ * Some methods may be sensitive to the {@link Idempotent} annotation
+ * (annotated in {@link UnreliableInterface}).
+ */
public class TestRetryProxy {
private UnreliableImplementation unreliableImpl;
@@ -348,4 +357,24 @@ public class TestRetryProxy {
assertEquals(RetryDecision.FAIL, caughtRetryAction.action);
}
}
+
+ @Test
+ public void testNoRetryOnAccessControlException() throws Exception {
+ RetryPolicy policy = mock(RetryPolicy.class);
+ RetryPolicy realPolicy = RetryPolicies.failoverOnNetworkException(5);
+ setupMockPolicy(policy, realPolicy);
+
+ UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(
+ UnreliableInterface.class, unreliableImpl, policy);
+
+ try {
+ unreliable.failsWithAccessControlExceptionEightTimes();
+ fail("Should fail");
+ } catch (AccessControlException e) {
+ // expected
+ verify(policy, times(1)).shouldRetry(any(Exception.class), anyInt(),
+ anyInt(), anyBoolean());
+ assertEquals(RetryDecision.FAIL, caughtRetryAction.action);
+ }
+ }
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
index 85bd598..97031fd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
@@ -23,7 +23,14 @@ import javax.security.sasl.SaslException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.StandbyException;
+import org.apache.hadoop.security.AccessControlException;
+/**
+ * For the usage and purpose of this class see {@link UnreliableInterface}
+ * which this class implements.
+ *
+ * @see UnreliableInterface
+ */
class UnreliableImplementation implements UnreliableInterface {
private int failsOnceInvocationCount,
@@ -32,6 +39,7 @@ class UnreliableImplementation implements UnreliableInterface {
failsOnceRemoteExceptionInvocationCount,
failsTenTimesInvocationCount,
failsWithSASLExceptionTenTimesInvocationCount,
+ failsWithAccessControlExceptionInvocationCount,
succeedsOnceThenFailsCount,
succeedsOnceThenFailsIdempotentCount,
succeedsTenTimesThenFailsCount;
@@ -124,6 +132,14 @@ class UnreliableImplementation implements UnreliableInterface {
}
@Override
+ public void failsWithAccessControlExceptionEightTimes()
+ throws AccessControlException {
+ if (failsWithAccessControlExceptionInvocationCount++ < 8) {
+ throw new AccessControlException();
+ }
+ }
+
+ @Override
public String succeedsOnceThenFailsReturningString()
throws UnreliableException, IOException, StandbyException {
if (succeedsOnceThenFailsCount++ < 1) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
index d334542..738a760 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
@@ -24,7 +24,20 @@ import javax.security.sasl.SaslException;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.StandbyException;
+import org.apache.hadoop.security.AccessControlException;
+/**
+ * The methods of UnreliableInterface can throw exceptions in a
+ * predefined way. It is currently used for testing {@link RetryPolicy}
+ * and {@link FailoverProxyProvider} classes, but can be potentially used
+ * to test any class's behaviour where an underlying interface or class
+ * may throw exceptions.
+ *
+ * Some methods may be annotated with the {@link Idempotent} annotation.
+ * In order to test those, some methods of UnreliableInterface are annotated,
+ * but they are not actually idempotent functions.
+ *
+ */
public interface UnreliableInterface {
public static class UnreliableException extends Exception {
@@ -66,6 +79,10 @@ public interface UnreliableInterface {
void failsWithSASLExceptionTenTimes() throws SaslException;
+ @Idempotent
+ void failsWithAccessControlExceptionEightTimes()
+ throws AccessControlException;
+
public String succeedsOnceThenFailsReturningString()
throws UnreliableException, StandbyException, IOException;
@Idempotent
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org