Posted to hdfs-commits@hadoop.apache.org by su...@apache.org on 2012/12/15 17:39:07 UTC

svn commit: r1422281 - in /hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/server/datanode/ src/main/native/ src/main/webapps/datanode...

Author: suresh
Date: Sat Dec 15 16:39:02 2012
New Revision: 1422281

URL: http://svn.apache.org/viewvc?rev=1422281&view=rev
Log:
Merging trunk to branch-trunk-win

Modified:
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SocketCache.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/native/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/hdfs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:r1421466-1422280

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1422281&r1=1422280&r2=1422281&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Sat Dec 15 16:39:02 2012
@@ -277,6 +277,14 @@ Trunk (Unreleased)
     HADOOP-8957 HDFS tests for AbstractFileSystem#IsValidName should be overridden for
     embedded file systems like ViewFs (Chris Nauroth via Sanjay Radia)
 
+    HDFS-4310. fix test org.apache.hadoop.hdfs.server.datanode.
+    TestStartSecureDataNode (Ivan A. Veselovsky via atm)
+
+    HDFS-4274. BlockPoolSliceScanner does not close verification log during
+    shutdown. (Chris Nauroth via suresh)
+
+    HDFS-4275. MiniDFSCluster-based tests fail on Windows due to failure
+    to delete test namenode directory. (Chris Nauroth via suresh)
 
 Release 2.0.3-alpha - Unreleased 
 
@@ -608,6 +616,9 @@ Release 2.0.3-alpha - Unreleased 
     HDFS-2264. NamenodeProtocol has the wrong value for clientPrincipal in
     KerberosInfo annotation. (atm)
 
+    HDFS-4307. SocketCache should use monotonic time. (Colin Patrick McCabe
+    via atm)
+
   BREAKDOWN OF HDFS-3077 SUBTASKS
 
     HDFS-3077. Quorum-based protocol for reading and writing edit logs.

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:r1421466-1422280

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SocketCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SocketCache.java?rev=1422281&r1=1422280&r2=1422281&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SocketCache.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/SocketCache.java Sat Dec 15 16:39:02 2012
@@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.protocol.d
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
 
 /**
  * A cache of input stream sockets to Data Node.
@@ -53,7 +54,7 @@ class SocketCache {
     public SocketAndStreams(Socket s, IOStreamPair ioStreams) {
       this.sock = s;
       this.ioStreams = ioStreams;
-      this.createTime = System.currentTimeMillis();
+      this.createTime = Time.monotonicNow();
     }
     
     @Override
@@ -205,7 +206,7 @@ class SocketCache {
       Entry<SocketAddress, SocketAndStreams> entry = iter.next();
       // if oldest socket expired, remove it
       if (entry == null || 
-        System.currentTimeMillis() - entry.getValue().getCreateTime() < 
+        Time.monotonicNow() - entry.getValue().getCreateTime() < 
         expiryPeriod) {
         break;
       }
@@ -236,13 +237,13 @@ class SocketCache {
    * older than expiryPeriod minutes
    */
   private void run() throws InterruptedException {
-    for(long lastExpiryTime = System.currentTimeMillis();
+    for(long lastExpiryTime = Time.monotonicNow();
         !Thread.interrupted();
         Thread.sleep(expiryPeriod)) {
-      final long elapsed = System.currentTimeMillis() - lastExpiryTime;
+      final long elapsed = Time.monotonicNow() - lastExpiryTime;
       if (elapsed >= expiryPeriod) {
         evictExpired(expiryPeriod);
-        lastExpiryTime = System.currentTimeMillis();
+        lastExpiryTime = Time.monotonicNow();
       }
     }
     clear();

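The SocketCache change above replaces wall-clock timestamps (System.currentTimeMillis()) with Hadoop's monotonic clock for cache-entry expiry, so NTP steps or manual clock changes cannot make entries look older or newer than they are. Below is a minimal standalone sketch of the same idea, using System.nanoTime() as a stand-in for org.apache.hadoop.util.Time.monotonicNow(); the class name and expiry constant are illustrative only.

    // Minimal sketch: expiry checks against a monotonic clock.
    // System.nanoTime() stands in for Hadoop's Time.monotonicNow(); unlike
    // System.currentTimeMillis(), it never jumps when the wall clock is adjusted,
    // so the computed "elapsed" value cannot go negative or leap forward.
    public class MonotonicExpiryDemo {
      static final long EXPIRY_MILLIS = 3_000;

      static class Entry {
        final long createTimeNanos = System.nanoTime();  // monotonic creation stamp

        boolean isExpired() {
          long elapsedMillis = (System.nanoTime() - createTimeNanos) / 1_000_000;
          return elapsedMillis >= EXPIRY_MILLIS;
        }
      }

      public static void main(String[] args) throws InterruptedException {
        Entry e = new Entry();
        System.out.println("expired immediately? " + e.isExpired());  // false
        Thread.sleep(3_100);
        System.out.println("expired after 3.1s?  " + e.isExpired());  // true
      }
    }
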
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java?rev=1422281&r1=1422280&r2=1422281&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java Sat Dec 15 16:39:02 2012
@@ -602,6 +602,15 @@ class BlockPoolSliceScanner {
       lastScanTime.set(Time.now());
     }
   }
+
+  /**
+   * Shuts down this BlockPoolSliceScanner and releases any internal resources.
+   */
+  void shutdown() {
+    if (verificationLog != null) {
+      verificationLog.close();
+    }
+  }
   
   private void scan() {
     if (LOG.isDebugEnabled()) {
@@ -610,7 +619,8 @@ class BlockPoolSliceScanner {
     try {
       adjustThrottler();
         
-      while (datanode.shouldRun && !Thread.interrupted()
+      while (datanode.shouldRun
+          && !datanode.blockScanner.blockScannerThread.isInterrupted()
           && datanode.isBPServiceAlive(blockPoolId)) {
         long now = Time.now();
         synchronized (this) {

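The BlockPoolSliceScanner change adds a shutdown() that closes the verification log (HDFS-4274) and switches the scan loop's exit test from Thread.interrupted(), which clears the interrupt flag as a side effect, to an isInterrupted() check on the scanner thread, which only reads it. The hypothetical sketch below shows the same loop-plus-shutdown shape; ScannerSketch and verificationLog are illustrative names, not the Hadoop classes.

    // Hypothetical sketch: a worker loop that polls its own interrupt status
    // without clearing it, plus a shutdown() that closes the log the loop wrote to.
    import java.io.PrintWriter;
    import java.io.StringWriter;

    public class ScannerSketch {
      private final Thread scannerThread;
      private final PrintWriter verificationLog = new PrintWriter(new StringWriter());

      public ScannerSketch() {
        scannerThread = new Thread(() -> {
          // isInterrupted() only *reads* the flag; Thread.interrupted() would read
          // and clear it, hiding the interrupt from any later check.
          while (!Thread.currentThread().isInterrupted()) {
            verificationLog.println("scanned one block");
            try {
              Thread.sleep(100);
            } catch (InterruptedException e) {
              Thread.currentThread().interrupt();  // restore the flag and exit
            }
          }
        }, "scanner");
      }

      public void start() { scannerThread.start(); }

      public void shutdown() throws InterruptedException {
        scannerThread.interrupt();
        scannerThread.join();
        verificationLog.close();  // release the log once scanning has stopped
      }

      public static void main(String[] args) throws InterruptedException {
        ScannerSketch s = new ScannerSketch();
        s.start();
        Thread.sleep(300);
        s.shutdown();
      }
    }
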
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java?rev=1422281&r1=1422280&r2=1422281&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java Sat Dec 15 16:39:02 2012
@@ -100,6 +100,11 @@ public class DataBlockScanner implements
       }
       bpScanner.scanBlockPoolSlice();
     }
+
+    // Call shutdown for each allocated BlockPoolSliceScanner.
+    for (BlockPoolSliceScanner bpss: blockPoolScannerMap.values()) {
+      bpss.shutdown();
+    }
   }
 
   // Wait for at least one block pool to be up
@@ -232,9 +237,21 @@ public class DataBlockScanner implements
     }
   }
   
-  public synchronized void shutdown() {
+  public void shutdown() {
+    synchronized (this) {
+      if (blockScannerThread != null) {
+        blockScannerThread.interrupt();
+      }
+    }
+
+    // We cannot join within the synchronized block, because it would create a
+    // deadlock situation.  blockScannerThread calls other synchronized methods.
     if (blockScannerThread != null) {
-      blockScannerThread.interrupt();
+      try {
+        blockScannerThread.join();
+      } catch (InterruptedException e) {
+        // shutting down anyway
+      }
     }
   }
 

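The DataBlockScanner change interrupts the scanner thread while holding the object's monitor but joins it only after releasing the lock: joining inside the synchronized block could deadlock, because the scanner thread calls other synchronized methods on the same object before it can exit. A hypothetical sketch of the same interrupt-under-lock, join-outside-lock pattern (class and method names are illustrative):

    // Hypothetical sketch: signal under the lock, wait outside it.
    public class ShutdownSketch {
      private Thread worker;

      public synchronized void start() {
        worker = new Thread(() -> {
          while (!Thread.currentThread().isInterrupted()) {
            recordProgress();                    // needs this object's monitor
            try { Thread.sleep(50); } catch (InterruptedException e) { return; }
          }
        }, "worker");
        worker.start();
      }

      private synchronized void recordProgress() { /* touch shared state */ }

      public void shutdown() {
        Thread t;
        synchronized (this) {        // hold the lock only long enough to signal
          t = worker;
          if (t != null) {
            t.interrupt();
          }
        }
        if (t != null) {
          try {
            t.join();                // wait outside the lock; the worker can still
          } catch (InterruptedException e) {    // enter recordProgress() and exit
            Thread.currentThread().interrupt(); // cleanly, so no deadlock
          }
        }
      }

      public static void main(String[] args) throws InterruptedException {
        ShutdownSketch s = new ShutdownSketch();
        s.start();
        Thread.sleep(200);
        s.shutdown();
        System.out.println("shut down without deadlock");
      }
    }
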
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/native/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/native:r1421466-1422280

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode:r1421466-1422280

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs:r1421466-1422280

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary:r1421466-1422280

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/hdfs:r1421466-1422280

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java?rev=1422281&r1=1422280&r2=1422281&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java Sat Dec 15 16:39:02 2012
@@ -97,6 +97,7 @@ public class TestBlockRecovery {
     MiniDFSCluster.getBaseDirectory() + "data";
   private DataNode dn;
   private Configuration conf;
+  private boolean tearDownDone;
   private final static long RECOVERY_ID = 3000L;
   private final static String CLUSTER_ID = "testClusterID";
   private final static String POOL_ID = "BP-TEST";
@@ -121,6 +122,7 @@ public class TestBlockRecovery {
    */
   @Before
   public void startUp() throws IOException {
+    tearDownDone = false;
     conf = new HdfsConfiguration();
     conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, DATA_DIR);
     conf.set(DFSConfigKeys.DFS_DATANODE_ADDRESS_KEY, "0.0.0.0:0");
@@ -177,7 +179,7 @@ public class TestBlockRecovery {
    */
   @After
   public void tearDown() throws IOException {
-    if (dn != null) {
+    if (!tearDownDone && dn != null) {
       try {
         dn.shutdown();
       } catch(Exception e) {
@@ -188,6 +190,7 @@ public class TestBlockRecovery {
           Assert.assertTrue(
               "Cannot delete data-node dirs", FileUtil.fullyDelete(dir));
       }
+      tearDownDone = true;
     }
   }
 

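The TestBlockRecovery change adds a tearDownDone flag so the teardown work runs at most once; a repeated call becomes a no-op rather than a second attempt to delete data-node directories, which is the Windows failure mode described in HDFS-4275. A hypothetical JUnit 4 sketch of the same run-once guard; the class name and the AutoCloseable stand-in for MiniDFSCluster are illustrative:

    // Hypothetical sketch: a guarded, run-once teardown.
    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    public class GuardedTearDownSketch {
      private boolean tearDownDone;
      private AutoCloseable cluster;            // stand-in for MiniDFSCluster

      @Before
      public void startUp() {
        tearDownDone = false;
        cluster = () -> System.out.println("cluster resources released");
      }

      @Test
      public void testThatShutsDownEarly() throws Exception {
        doTearDown();                           // the test tears down explicitly
      }

      @After
      public void tearDown() throws Exception {
        doTearDown();                           // safe: the second call is a no-op
      }

      private void doTearDown() throws Exception {
        if (!tearDownDone && cluster != null) {
          cluster.close();
          tearDownDone = true;
        }
      }
    }
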
Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java?rev=1422281&r1=1422280&r2=1422281&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestStartSecureDataNode.java Sat Dec 15 16:39:02 2012
@@ -17,24 +17,14 @@
 
 package org.apache.hadoop.hdfs.server.datanode;
 
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import static org.apache.hadoop.security.SecurityUtilTestHelper.isExternalKdcRunning;
 import org.junit.Assume;
 import org.junit.Before;
@@ -67,7 +57,7 @@ public class TestStartSecureDataNode {
   }
 
   @Test
-  public void testSecureNameNode() throws IOException, InterruptedException {
+  public void testSecureNameNode() throws Exception {
     MiniDFSCluster cluster = null;
     try {
       String nnPrincipal =
@@ -105,9 +95,9 @@ public class TestStartSecureDataNode {
         .build();
       cluster.waitActive();
       assertTrue(cluster.isDataNodeUp());
-
     } catch (Exception ex) {
       ex.printStackTrace();
+      throw ex;
     } finally {
       if (cluster != null) {
         cluster.shutdown();
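
The TestStartSecureDataNode change rethrows the caught exception after printing its stack trace, so a failure during secure cluster startup actually fails the test instead of being swallowed by the catch block. A hypothetical JUnit 4 sketch of that log-and-rethrow pattern; startMiniCluster() is an illustrative stand-in for the MiniDFSCluster setup:

    // Hypothetical sketch: keep the diagnostic output, but still fail the test.
    import static org.junit.Assert.assertTrue;
    import org.junit.Test;

    public class RethrowSketch {

      // Illustrative stand-in for cluster startup; succeeds here so the sketch
      // passes, but the catch block shows the pattern when it does not.
      private boolean startMiniCluster() throws Exception {
        return true;
      }

      @Test
      public void testClusterComesUp() throws Exception {
        try {
          assertTrue(startMiniCluster());
        } catch (Exception ex) {
          ex.printStackTrace();   // keep the stack trace in the test log...
          throw ex;               // ...and rethrow so JUnit marks the test failed
        }
      }
    }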