Posted to common-commits@hadoop.apache.org by to...@apache.org on 2007/05/25 23:36:23 UTC

svn commit: r541785 - in /lucene/hadoop/branches/branch-0.13: ./ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/io/retry/ src/test/org/apache/hadoop/io/retry/

Author: tomwhite
Date: Fri May 25 14:36:22 2007
New Revision: 541785

URL: http://svn.apache.org/viewvc?view=rev&rev=541785
Log:
Merge -r 541782:541783 from trunk to 0.13 branch. Fixes: HADOOP-1411.

Modified:
    lucene/hadoop/branches/branch-0.13/CHANGES.txt
    lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/dfs/DFSClient.java
    lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
    lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryPolicies.java
    lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/TestRetryProxy.java
    lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableImplementation.java
    lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableInterface.java

Modified: lucene/hadoop/branches/branch-0.13/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.13/CHANGES.txt?view=diff&rev=541785&r1=541784&r2=541785
==============================================================================
--- lucene/hadoop/branches/branch-0.13/CHANGES.txt (original)
+++ lucene/hadoop/branches/branch-0.13/CHANGES.txt Fri May 25 14:36:22 2007
@@ -426,6 +426,10 @@
      to indicate the default directory, per HADOOP-1386.
      (Hairong Kuang via cutting)
 
+128. HADOOP-1411.  Make task retry framework handle 
+     AlreadyBeingCreatedException when wrapped as a RemoteException.
+     (Hairong Kuang via tomwhite)
+
 
 Release 0.12.3 - 2007-04-06
 

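A note on the entry above: the namenode raises AlreadyBeingCreatedException on
its own side, but by the time it reaches DFSClient the RPC layer has wrapped it
in a RemoteException, so a retry policy keyed on the original exception class
never matches. A minimal standalone sketch of the mismatch (not part of this
commit; the class-name string is illustrative):

    import org.apache.hadoop.ipc.RemoteException;

    public class WrappedExceptionSketch {
      public static void main(String[] args) {
        // What the RPC layer hands back to the client: only the *name* of the
        // server-side exception survives, carried inside a RemoteException.
        RemoteException wrapped = new RemoteException(
            "org.apache.hadoop.dfs.AlreadyBeingCreatedException",
            "failed to create file: already being created");

        // A policy map keyed on AlreadyBeingCreatedException.class cannot match
        // this object, because its runtime class is RemoteException.
        System.out.println(wrapped.getClass().getName()); // ...ipc.RemoteException
        System.out.println(wrapped.getClassName());       // the wrapped class name
      }
    }
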
Modified: lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/dfs/DFSClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/dfs/DFSClient.java?view=diff&rev=541785&r1=541784&r2=541785
==============================================================================
--- lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/dfs/DFSClient.java (original)
+++ lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/dfs/DFSClient.java Fri May 25 14:36:22 2007
@@ -106,11 +106,16 @@
     RetryPolicy createPolicy = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
         5, LEASE_SOFTLIMIT_PERIOD, TimeUnit.MILLISECONDS);
     
+    Map<Class<? extends Exception>,RetryPolicy> remoteExceptionToPolicyMap =
+      new HashMap<Class<? extends Exception>, RetryPolicy>();
+    remoteExceptionToPolicyMap.put(AlreadyBeingCreatedException.class, createPolicy);
+
     Map<Class<? extends Exception>,RetryPolicy> exceptionToPolicyMap =
       new HashMap<Class<? extends Exception>, RetryPolicy>();
+    exceptionToPolicyMap.put(RemoteException.class, 
+        RetryPolicies.retryByRemoteException(
+            RetryPolicies.TRY_ONCE_THEN_FAIL, remoteExceptionToPolicyMap));
     exceptionToPolicyMap.put(SocketTimeoutException.class, timeoutPolicy);
-    exceptionToPolicyMap.put(AlreadyBeingCreatedException.class, createPolicy);
-
     RetryPolicy methodPolicy = RetryPolicies.retryByException(
         RetryPolicies.TRY_ONCE_THEN_FAIL, exceptionToPolicyMap);
     Map<String,RetryPolicy> methodNameToPolicyMap = new HashMap<String,RetryPolicy>();
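
To see how the pieces above fit together: createPolicy now sits behind the
RemoteException policy instead of being keyed on AlreadyBeingCreatedException
directly. A hedged sketch of how the composed methodPolicy is presumably
attached to the namenode proxy; the "create" registration, the rpcNamenode
variable, and the RetryProxy map overload below are assumptions based on the
surrounding DFSClient code, which this hunk does not show:

    // Assumed continuation of the code above (not part of this diff): register
    // the composed policy for the "create" RPC and wrap the raw namenode proxy
    // so retries are applied transparently to callers.
    methodNameToPolicyMap.put("create", methodPolicy);
    ClientProtocol namenode = (ClientProtocol) RetryProxy.create(
        ClientProtocol.class, rpcNamenode, methodNameToPolicyMap);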

Modified: lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java?view=diff&rev=541785&r1=541784&r2=541785
==============================================================================
--- lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java (original)
+++ lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java Fri May 25 14:36:22 2007
@@ -59,7 +59,7 @@
         return invokeMethod(method, args);
       } catch (Exception e) {
         if (!policy.shouldRetry(e, retries++)) {
-          LOG.warn("Exception while invoking " + method.getName()
+          LOG.info("Exception while invoking " + method.getName()
                    + " of " + implementation.getClass() + ". Not retrying."
                    + StringUtils.stringifyException(e));
           if (!method.getReturnType().equals(Void.TYPE)) {
@@ -67,7 +67,7 @@
           }
           return null;
         }
-        LOG.info("Exception while invoking " + method.getName()
+        LOG.debug("Exception while invoking " + method.getName()
                  + " of " + implementation.getClass() + ". Retrying."
                  + StringUtils.stringifyException(e));
       }

Modified: lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryPolicies.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryPolicies.java?view=diff&rev=541785&r1=541784&r2=541785
==============================================================================
--- lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryPolicies.java (original)
+++ lucene/hadoop/branches/branch-0.13/src/java/org/apache/hadoop/io/retry/RetryPolicies.java Fri May 25 14:36:22 2007
@@ -17,10 +17,15 @@
  */
 package org.apache.hadoop.io.retry;
 
+import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
+import java.util.Set;
+import java.util.Map.Entry;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.ipc.RemoteException;
+
 /**
  * <p>
  * A collection of useful implementations of {@link RetryPolicy}.
@@ -83,10 +88,19 @@
     return new RetryUpToMaximumCountWithProportionalSleep(maxRetries, sleepTime, timeUnit);
   }
   
+  /**
+   * <p>
+   * Keep trying a limited number of times, waiting a growing amount of time between attempts,
+   * and then fail by re-throwing the exception.
+   * The time between attempts is <code>sleepTime</code> multiplied by a random
+   * number in the range [0, 2^(number of retries)).
+   * </p>
+   */
   public static final RetryPolicy exponentialBackoffRetry(
       int maxRetries, long sleepTime, TimeUnit timeUnit) {
     return new ExponentialBackoffRetry(maxRetries, sleepTime, timeUnit);
   }
+  
   /**
    * <p>
    * Set a default policy with some explicit handlers for specific exceptions.
@@ -97,6 +111,18 @@
     return new ExceptionDependentRetry(defaultPolicy, exceptionToPolicyMap);
   }
   
+  /**
+   * <p>
+   * A retry policy for RemoteException.
+   * Set a default policy with some explicit handlers for specific exceptions.
+   * </p>
+   */
+  public static final RetryPolicy retryByRemoteException(
+      RetryPolicy defaultPolicy,
+      Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap) {
+    return new RemoteExceptionDependentRetry(defaultPolicy, exceptionToPolicyMap);
+  }
+  
   static class TryOnceThenFail implements RetryPolicy {
     public boolean shouldRetry(Exception e, int retries) throws Exception {
       throw e;
@@ -187,6 +213,35 @@
       return policy.shouldRetry(e, retries);
     }
     
+  }
+  
+  static class RemoteExceptionDependentRetry implements RetryPolicy {
+
+    RetryPolicy defaultPolicy;
+    Map<String, RetryPolicy> exceptionNameToPolicyMap;
+    
+    public RemoteExceptionDependentRetry(RetryPolicy defaultPolicy,
+                                   Map<Class<? extends Exception>,
+                                   RetryPolicy> exceptionToPolicyMap) {
+      this.defaultPolicy = defaultPolicy;
+      this.exceptionNameToPolicyMap = new HashMap<String, RetryPolicy>();
+      for (Entry<Class<? extends Exception>, RetryPolicy> e :
+          exceptionToPolicyMap.entrySet()) {
+        exceptionNameToPolicyMap.put(e.getKey().getName(), e.getValue());
+      }
+    }
+
+    public boolean shouldRetry(Exception e, int retries) throws Exception {
+      RetryPolicy policy = null;
+      if (e instanceof RemoteException) {
+        policy = exceptionNameToPolicyMap.get(
+            ((RemoteException) e).getClassName());
+      }
+      if (policy == null) {
+        policy = defaultPolicy;
+      }
+      return policy.shouldRetry(e, retries);
+    }
   }
   
   static class ExponentialBackoffRetry extends RetryLimited {
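
The new RemoteExceptionDependentRetry resolves a policy from the class name
carried inside the RemoteException rather than from the exception's runtime
class, and falls back to the default policy for any unmapped name. A small
standalone sketch of that lookup; the exception types and messages are
illustrative, not taken from this commit:

    import java.util.Collections;
    import java.util.Map;

    import org.apache.hadoop.io.retry.RetryPolicies;
    import org.apache.hadoop.io.retry.RetryPolicy;
    import org.apache.hadoop.ipc.RemoteException;

    public class RemoteRetrySketch {
      public static void main(String[] args) throws Exception {
        // Map one wrapped exception type to a forgiving policy; every other
        // class name falls through to TRY_ONCE_THEN_FAIL.
        Map<Class<? extends Exception>, RetryPolicy> map =
            Collections.<Class<? extends Exception>, RetryPolicy>singletonMap(
                IllegalStateException.class, RetryPolicies.RETRY_FOREVER);

        RetryPolicy policy = RetryPolicies.retryByRemoteException(
            RetryPolicies.TRY_ONCE_THEN_FAIL, map);

        // Mapped name: looked up via getClassName(), so RETRY_FOREVER answers.
        RemoteException mapped = new RemoteException(
            IllegalStateException.class.getName(), "server-side failure");
        System.out.println(policy.shouldRetry(mapped, 0)); // true

        // Unmapped name: TRY_ONCE_THEN_FAIL rethrows instead of retrying.
        RemoteException unmapped = new RemoteException(
            "java.io.IOException", "unmapped server-side failure");
        try {
          policy.shouldRetry(unmapped, 0);
        } catch (RemoteException expected) {
          System.out.println("not retried: " + expected.getMessage());
        }
      }
    }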

Modified: lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/TestRetryProxy.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/TestRetryProxy.java?view=diff&rev=541785&r1=541784&r2=541785
==============================================================================
--- lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/TestRetryProxy.java (original)
+++ lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/TestRetryProxy.java Fri May 25 14:36:22 2007
@@ -4,6 +4,7 @@
 import static org.apache.hadoop.io.retry.RetryPolicies.TRY_ONCE_DONT_FAIL;
 import static org.apache.hadoop.io.retry.RetryPolicies.TRY_ONCE_THEN_FAIL;
 import static org.apache.hadoop.io.retry.RetryPolicies.retryByException;
+import static org.apache.hadoop.io.retry.RetryPolicies.retryByRemoteException;
 import static org.apache.hadoop.io.retry.RetryPolicies.retryUpToMaximumCountWithFixedSleep;
 import static org.apache.hadoop.io.retry.RetryPolicies.retryUpToMaximumCountWithProportionalSleep;
 import static org.apache.hadoop.io.retry.RetryPolicies.retryUpToMaximumTimeWithFixedSleep;
@@ -17,6 +18,7 @@
 
 import org.apache.hadoop.io.retry.UnreliableInterface.FatalException;
 import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException;
+import org.apache.hadoop.ipc.RemoteException;
 
 public class TestRetryProxy extends TestCase {
   
@@ -125,11 +127,26 @@
                         retryByException(RETRY_FOREVER, exceptionToPolicyMap));
     unreliable.failsOnceThenSucceeds();
     try {
-      unreliable.alwaysfailsWithFatalException();
+      unreliable.alwaysFailsWithFatalException();
       fail("Should fail");
     } catch (FatalException e) {
       // expected
     }
   }
+  
+  public void testRetryByRemoteException() throws UnreliableException {
+    Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
+      Collections.<Class<? extends Exception>, RetryPolicy>singletonMap(FatalException.class, TRY_ONCE_THEN_FAIL);
+    
+    UnreliableInterface unreliable = (UnreliableInterface)
+      RetryProxy.create(UnreliableInterface.class, unreliableImpl,
+                        retryByRemoteException(RETRY_FOREVER, exceptionToPolicyMap));
+    try {
+      unreliable.alwaysFailsWithRemoteFatalException();
+      fail("Should fail");
+    } catch (RemoteException e) {
+      // expected
+    }
+  }  
   
 }

Modified: lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableImplementation.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableImplementation.java?view=diff&rev=541785&r1=541784&r2=541785
==============================================================================
--- lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableImplementation.java (original)
+++ lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableImplementation.java Fri May 25 14:36:22 2007
@@ -1,5 +1,7 @@
 package org.apache.hadoop.io.retry;
 
+import org.apache.hadoop.ipc.RemoteException;
+
 public class UnreliableImplementation implements UnreliableInterface {
 
   private int failsOnceInvocationCount,
@@ -10,8 +12,12 @@
     // do nothing
   }
   
-  public void alwaysfailsWithFatalException() throws FatalException {
+  public void alwaysFailsWithFatalException() throws FatalException {
     throw new FatalException();
+  }
+  
+  public void alwaysFailsWithRemoteFatalException() throws RemoteException {
+    throw new RemoteException(FatalException.class.getName(), "Oops");
   }
 
   public void failsOnceThenSucceeds() throws UnreliableException {

Modified: lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableInterface.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableInterface.java?view=diff&rev=541785&r1=541784&r2=541785
==============================================================================
--- lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableInterface.java (original)
+++ lucene/hadoop/branches/branch-0.13/src/test/org/apache/hadoop/io/retry/UnreliableInterface.java Fri May 25 14:36:22 2007
@@ -1,5 +1,7 @@
 package org.apache.hadoop.io.retry;
 
+import org.apache.hadoop.ipc.RemoteException;
+
 public interface UnreliableInterface {
   
   public static class UnreliableException extends Exception {
@@ -12,7 +14,8 @@
   
   void alwaysSucceeds() throws UnreliableException;
   
-  void alwaysfailsWithFatalException() throws FatalException;
+  void alwaysFailsWithFatalException() throws FatalException;
+  void alwaysFailsWithRemoteFatalException() throws RemoteException;
 
   void failsOnceThenSucceeds() throws UnreliableException;
   boolean failsOnceThenSucceedsWithReturnValue() throws UnreliableException;