Posted to hdfs-commits@hadoop.apache.org by to...@apache.org on 2011/08/16 02:37:25 UTC

svn commit: r1158072 [7/7] - in /hadoop/common/branches/HDFS-1623/hdfs: ./ ivy/ src/c++/libhdfs/ src/contrib/ src/contrib/fuse-dfs/ src/java/ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/protocol/ src/java/org/apache/hadoop/hdfs/ser...

Modified: hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestNamenodeCapacityReport.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestNamenodeCapacityReport.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestNamenodeCapacityReport.java (original)
+++ hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestNamenodeCapacityReport.java Tue Aug 16 00:37:15 2011
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.na
 
 import java.io.File;
 import java.util.ArrayList;
+import java.util.List;
 
 import junit.framework.TestCase;
 
@@ -32,6 +33,7 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
+import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager;
 
 
 
@@ -59,11 +61,13 @@ public class TestNamenodeCapacityReport 
       cluster.waitActive();
       
       final FSNamesystem namesystem = cluster.getNamesystem();
+      final DatanodeManager dm = cluster.getNamesystem().getBlockManager(
+          ).getDatanodeManager();
       
       // Ensure the data reported for each data node is right
-      ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
-      ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
-      namesystem.DFSNodesStatus(live, dead);
+      final List<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
+      final List<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
+      dm.fetchDatanodes(live, dead, false);
       
       assertTrue(live.size() == 1);
       
@@ -112,10 +116,10 @@ public class TestNamenodeCapacityReport 
       
       configCapacity = namesystem.getCapacityTotal();
       used = namesystem.getCapacityUsed();
-      nonDFSUsed = namesystem.getCapacityUsedNonDFS();
+      nonDFSUsed = namesystem.getNonDfsUsedSpace();
       remaining = namesystem.getCapacityRemaining();
-      percentUsed = namesystem.getCapacityUsedPercent();
-      percentRemaining = namesystem.getCapacityRemainingPercent();
+      percentUsed = namesystem.getPercentUsed();
+      percentRemaining = namesystem.getPercentRemaining();
       bpUsed = namesystem.getBlockPoolUsedSpace();
       percentBpUsed = namesystem.getPercentBlockPoolUsed();
       

Modified: hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java (original)
+++ hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java Tue Aug 16 00:37:15 2011
@@ -140,17 +140,9 @@ public class TestStorageRestore {
   /**
    * test
    */
-  public void printStorages(FSImage fs) {
-    LOG.info("current storages and corresponding sizes:");
-    for(Iterator<StorageDirectory> it = fs.getStorage().dirIterator(); it.hasNext(); ) {
-      StorageDirectory sd = it.next();
-
-      File curDir = sd.getCurrentDir();
-      for (File f : curDir.listFiles()) {
-        LOG.info("  file " + f.getAbsolutePath() + "; len = " + f.length());  
-      }
-    }
-  }
+  private void printStorages(FSImage image) {
+    FSImageTestUtil.logStorageContents(LOG, image.getStorage());
+  }    
 
   /**
    * test 

Modified: hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java (original)
+++ hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestStreamFile.java Tue Aug 16 00:37:15 2011
@@ -48,8 +48,8 @@ import org.mockito.Mockito;
 import org.mortbay.jetty.InclusiveByteRange;
 
 /*
-  Mock input stream class that always outputs the current position of the stream
-*/
+ * Mock input stream class that always outputs the current position of the stream. 
+ */
 class MockFSInputStream extends FSInputStream {
   long currentPos = 0;
   public int read() throws IOException {
@@ -198,7 +198,7 @@ public class TestStreamFile {
   }
   
   
-    // Test for positive scenario
+  // Test for positive scenario
   @Test
   public void testDoGetShouldWriteTheFileContentIntoServletOutputStream()
       throws Exception {
@@ -264,9 +264,11 @@ public class TestStreamFile {
     Mockito.doReturn(CONF).when(mockServletContext).getAttribute(
         JspHelper.CURRENT_CONF);
     Mockito.doReturn(NameNode.getHostPortString(NameNode.getAddress(CONF)))
-        .when(mockHttpServletRequest).getParameter("nnaddr");
+      .when(mockHttpServletRequest).getParameter("nnaddr");
     Mockito.doReturn(testFile.toString()).when(mockHttpServletRequest)
-        .getPathInfo();
+      .getPathInfo();
+    Mockito.doReturn("/streamFile"+testFile.toString()).when(mockHttpServletRequest)
+      .getRequestURI();
   }
 
   static Path writeFile(FileSystem fs, Path f) throws IOException {

Modified: hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java (original)
+++ hadoop/common/branches/HDFS-1623/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java Tue Aug 16 00:37:15 2011
@@ -75,6 +75,7 @@ public class TestNameNodeMetrics extends
   private DistributedFileSystem fs;
   private Random rand = new Random();
   private FSNamesystem namesystem;
+  private BlockManager bm;
 
   private static Path getTestPath(String fileName) {
     return new Path(TEST_ROOT_DIR_PATH, fileName);
@@ -85,6 +86,7 @@ public class TestNameNodeMetrics extends
     cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(DATANODE_COUNT).build();
     cluster.waitActive();
     namesystem = cluster.getNamesystem();
+    bm = namesystem.getBlockManager();
     fs = (DistributedFileSystem) cluster.getFileSystem();
   }
   
@@ -167,7 +169,7 @@ public class TestNameNodeMetrics extends
     // Corrupt first replica of the block
     LocatedBlock block = NameNodeAdapter.getBlockLocations(
         cluster.getNameNode(), file.toString(), 0, 1).get(0);
-    namesystem.markBlockAsCorrupt(block.getBlock(), block.getLocations()[0]);
+    bm.findAndMarkBlockAsCorrupt(block.getBlock(), block.getLocations()[0]);
     updateMetrics();
     MetricsRecordBuilder rb = getMetrics(NS_METRICS);
     assertGauge("CorruptBlocks", 1L, rb);
@@ -188,7 +190,7 @@ public class TestNameNodeMetrics extends
     Path file = getTestPath("testExcessBlocks");
     createFile(file, 100, (short)2);
     long totalBlocks = 1;
-    namesystem.setReplication(file.toString(), (short)1);
+    NameNodeAdapter.setReplication(namesystem, file.toString(), (short)1);
     updateMetrics();
     MetricsRecordBuilder rb = getMetrics(NS_METRICS);
     assertGauge("ExcessBlocks", totalBlocks, rb);
@@ -204,7 +206,7 @@ public class TestNameNodeMetrics extends
     // Corrupt the only replica of the block to result in a missing block
     LocatedBlock block = NameNodeAdapter.getBlockLocations(
         cluster.getNameNode(), file.toString(), 0, 1).get(0);
-    namesystem.markBlockAsCorrupt(block.getBlock(), block.getLocations()[0]);
+    bm.findAndMarkBlockAsCorrupt(block.getBlock(), block.getLocations()[0]);
     updateMetrics();
     MetricsRecordBuilder rb = getMetrics(NS_METRICS);
     assertGauge("UnderReplicatedBlocks", 1L, rb);

Modified: hadoop/common/branches/HDFS-1623/hdfs/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestNNLeaseRecovery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hdfs/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestNNLeaseRecovery.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hdfs/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestNNLeaseRecovery.java (original)
+++ hadoop/common/branches/HDFS-1623/hdfs/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestNNLeaseRecovery.java Tue Aug 16 00:37:15 2011
@@ -455,7 +455,7 @@ public class TestNNLeaseRecovery {
     fsn.leaseManager.addLease("mock-lease", file.toString());
     if (setStoredBlock) {
       when(b1.getINode()).thenReturn(iNFmock);
-      fsn.getBlockManager().blocksMap.addINode(b1, iNFmock);
+      fsn.getBlockManager().addINode(b1, iNFmock);
     }
 
     when(fsDir.getFileINode(anyString())).thenReturn(iNFmock);

Propchange: hadoop/common/branches/HDFS-1623/hdfs/src/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hdfs/src/webapps/datanode:1152502-1153927
+/hadoop/common/trunk/hdfs/src/webapps/datanode:1152502-1158071
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/datanode:713112
 /hadoop/core/trunk/src/webapps/datanode:776175-784663
 /hadoop/hdfs/branches/HDFS-1052/src/webapps/datanode:987665-1095512

Propchange: hadoop/common/branches/HDFS-1623/hdfs/src/webapps/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hdfs/src/webapps/hdfs:1152502-1153927
+/hadoop/common/trunk/hdfs/src/webapps/hdfs:1152502-1158071
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/hdfs:713112
 /hadoop/core/trunk/src/webapps/hdfs:776175-784663
 /hadoop/hdfs/branches/HDFS-1052/src/webapps/hdfs:987665-1095512

Propchange: hadoop/common/branches/HDFS-1623/hdfs/src/webapps/secondary/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hdfs/src/webapps/secondary:1152502-1153927
+/hadoop/common/trunk/hdfs/src/webapps/secondary:1152502-1158071
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/secondary:713112
 /hadoop/core/trunk/src/webapps/secondary:776175-784663
 /hadoop/hdfs/branches/HDFS-1052/src/webapps/secondary:987665-1095512