Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2016/03/09 18:45:33 UTC

[33/34] hadoop git commit: HDFS-9702. DiskBalancer: getVolumeMap implementation. (Contributed by Anu Engineer)

HDFS-9702. DiskBalancer: getVolumeMap implementation. (Contributed by Anu Engineer)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6fc218b3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6fc218b3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6fc218b3

Branch: refs/heads/HDFS-1312
Commit: 6fc218b32e9ead381ebf9fada8120fcc5b174da5
Parents: 78d7ee4
Author: Arpit Agarwal <ar...@apache.org>
Authored: Wed Mar 9 09:44:22 2016 -0800
Committer: Arpit Agarwal <ar...@apache.org>
Committed: Wed Mar 9 09:44:22 2016 -0800

----------------------------------------------------------------------
 .../hadoop/hdfs/server/datanode/DataNode.java   | 17 +++--
 .../hdfs/server/datanode/DiskBalancer.java      | 26 ++++++++
 .../diskbalancer/DiskBalancerException.java     |  3 +-
 .../diskbalancer/TestDiskBalancerRPC.java       | 66 ++++++++++++++++++++
 4 files changed, 107 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6fc218b3/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index fb86159..6c08b2c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -169,6 +169,7 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
 import org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer;
+import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerConstants;
 import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerException;
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
@@ -3339,8 +3340,8 @@ public class DataNode extends ReconfigurableBase
   }
 
   /**
-   * Gets a run-time configuration value from running diskbalancer instance. For
-   * example : Disk Balancer bandwidth of a running instance.
+   * Gets a runtime configuration value from the diskbalancer instance. For
+   * example: DiskBalancer bandwidth.
    *
    * @param key - String that represents the run time key value.
    * @return value of the key as a string.
@@ -3349,7 +3350,15 @@ public class DataNode extends ReconfigurableBase
   @Override
   public String getDiskBalancerSetting(String key) throws IOException {
     checkSuperuserPrivilege();
-    throw new DiskBalancerException("Not Implemented",
-        DiskBalancerException.Result.INTERNAL_ERROR);
+    Preconditions.checkNotNull(key);
+    switch (key) {
+    case DiskBalancerConstants.DISKBALANCER_VOLUME_NAME:
+      return this.diskBalancer.getVolumeNames();
+    default:
+      LOG.error("Disk Balancer - Unknown key in get balancer setting. Key: " +
+          key);
+      throw new DiskBalancerException("Unknown key",
+          DiskBalancerException.Result.UNKNOWN_KEY);
+    }
   }
 }
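
The new getDiskBalancerSetting() dispatch is keyed by constants from DiskBalancerConstants; any other key now fails fast with Result.UNKNOWN_KEY. As a minimal sketch (not part of this commit), here is how the one supported key could be queried end to end; the configuration key for enabling the balancer is assumed, since it does not appear in this diff:

import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerConstants;

public class VolumeMapQuerySketch {
  public static void main(String[] args) throws Exception {
    HdfsConfiguration conf = new HdfsConfiguration();
    // Assumption: the disk balancer must be enabled in the configuration,
    // or checkDiskBalancerEnabled() rejects the call; the exact config key
    // is not shown in this diff.
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
    try {
      cluster.waitActive();
      DataNode dn = cluster.getDataNodes().get(0);
      // Returns a JSON map of storage ID -> volume base path.
      String json = dn.getDiskBalancerSetting(
          DiskBalancerConstants.DISKBALANCER_VOLUME_NAME);
      System.out.println(json);
    } finally {
      cluster.shutdown();
    }
  }
}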

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6fc218b3/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
index d5c402e..9e41d2e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerException;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.Step;
 import org.apache.hadoop.util.Time;
+import org.codehaus.jackson.map.ObjectMapper;
 
 import java.io.IOException;
 import java.nio.charset.Charset;
@@ -221,6 +222,31 @@ public class DiskBalancer {
     }
   }
 
+  /**
+   * Returns a volume ID to volume base path map.
+   *
+   * @return JSON string of the volume map.
+   * @throws DiskBalancerException if disabled or serialization fails.
+   */
+  public String getVolumeNames() throws DiskBalancerException {
+    lock.lock();
+    try {
+      checkDiskBalancerEnabled();
+      Map<String, String> pathMap = new HashMap<>();
+      Map<String, FsVolumeSpi> volMap = getStorageIDToVolumeMap();
+      for (Map.Entry<String, FsVolumeSpi> entry : volMap.entrySet()) {
+        pathMap.put(entry.getKey(), entry.getValue().getBasePath());
+      }
+      ObjectMapper mapper = new ObjectMapper();
+      return mapper.writeValueAsString(pathMap);
+    } catch (IOException e) {
+      throw new DiskBalancerException("Internal error, Unable to " +
+          "create JSON string.", e,
+          DiskBalancerException.Result.INTERNAL_ERROR);
+    } finally {
+      lock.unlock();
+    }
+  }
 
 
   /**

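Since getVolumeNames() serializes a plain Map<String, String>, the returned value is a flat JSON object keyed by storage ID. A standalone sketch of the same Jackson 1.x call; the storage IDs and base paths below are made up for illustration:

import org.codehaus.jackson.map.ObjectMapper;

import java.util.HashMap;
import java.util.Map;

public class VolumeMapJsonSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical storage IDs and base paths; the real values come from
    // getStorageIDToVolumeMap() on the DataNode.
    Map<String, String> pathMap = new HashMap<>();
    pathMap.put("DS-d24ec4a6", "/data/disk1");
    pathMap.put("DS-7f2c89b1", "/data/disk2");
    // Same call as in getVolumeNames():
    System.out.println(new ObjectMapper().writeValueAsString(pathMap));
    // Prints something like:
    // {"DS-d24ec4a6":"/data/disk1","DS-7f2c89b1":"/data/disk2"}
  }
}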
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6fc218b3/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/DiskBalancerException.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/DiskBalancerException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/DiskBalancerException.java
index 00fe53d..38455a7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/DiskBalancerException.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/DiskBalancerException.java
@@ -36,7 +36,8 @@ public class DiskBalancerException extends IOException {
     INVALID_VOLUME,
     INVALID_MOVE,
     INTERNAL_ERROR,
-    NO_SUCH_PLAN
+    NO_SUCH_PLAN,
+    UNKNOWN_KEY
   }
 
   private final Result result;
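
With the new UNKNOWN_KEY result code, callers can branch on the machine-readable getResult() (exercised by the tests below) instead of parsing exception messages. A hedged sketch, assuming a DataNode reference dn is in scope:

try {
  dn.getDiskBalancerSetting("no-such-key");
} catch (DiskBalancerException e) {
  if (e.getResult() == DiskBalancerException.Result.UNKNOWN_KEY) {
    // Key is not a recognized runtime setting; report and continue.
  } else {
    throw e;  // INTERNAL_ERROR and other results still propagate.
  }
}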

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6fc218b3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/diskbalancer/TestDiskBalancerRPC.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/diskbalancer/TestDiskBalancerRPC.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/diskbalancer/TestDiskBalancerRPC.java
index e29b3b7..37a6216 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/diskbalancer/TestDiskBalancerRPC.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/diskbalancer/TestDiskBalancerRPC.java
@@ -24,18 +24,24 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus;
+import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerException.Result;
 import org.apache.hadoop.hdfs.server.diskbalancer.connectors.ClusterConnector;
 import org.apache.hadoop.hdfs.server.diskbalancer.connectors.ConnectorFactory;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.GreedyPlanner;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan;
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeMatcher;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.util.HashMap;
+import java.util.Map;
 
 import static org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result.NO_PLAN;
 import static org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result.PLAN_DONE;
@@ -84,6 +90,8 @@ public class TestDiskBalancerRPC {
     int planVersion = rpcTestHelper.getPlanVersion();
     NodePlan plan = rpcTestHelper.getPlan();
     thrown.expect(DiskBalancerException.class);
+    thrown.expect(new ResultVerifier(Result.INVALID_PLAN_HASH));
     dataNode.submitDiskBalancerPlan(planHash, planVersion, 10, plan.toJson());
   }
 
@@ -96,6 +104,8 @@ public class TestDiskBalancerRPC {
     planVersion++;
     NodePlan plan = rpcTestHelper.getPlan();
     thrown.expect(DiskBalancerException.class);
+    thrown.expect(new ResultVerifier(Result.INVALID_PLAN_VERSION));
     dataNode.submitDiskBalancerPlan(planHash, planVersion, 10, plan.toJson());
   }
 
@@ -107,6 +117,8 @@ public class TestDiskBalancerRPC {
     int planVersion = rpcTestHelper.getPlanVersion();
     NodePlan plan = rpcTestHelper.getPlan();
     thrown.expect(DiskBalancerException.class);
+    thrown.expect(new ResultVerifier(Result.INVALID_PLAN));
     dataNode.submitDiskBalancerPlan(planHash, planVersion, 10, "");
   }
 
@@ -131,6 +143,8 @@ public class TestDiskBalancerRPC {
     planHash = String.valueOf(hashArray);
     NodePlan plan = rpcTestHelper.getPlan();
     thrown.expect(DiskBalancerException.class);
+    thrown.expect(new ResultVerifier(Result.NO_SUCH_PLAN));
     dataNode.cancelDiskBalancePlan(planHash);
   }
 
@@ -141,9 +155,38 @@ public class TestDiskBalancerRPC {
     String planHash = "";
     NodePlan plan = rpcTestHelper.getPlan();
     thrown.expect(DiskBalancerException.class);
+    thrown.expect(new ResultVerifier(Result.NO_SUCH_PLAN));
     dataNode.cancelDiskBalancePlan(planHash);
   }
 
+  @Test
+  public void testGetDiskBalancerVolumeMapping() throws Exception {
+    final int dnIndex = 0;
+    DataNode dataNode = cluster.getDataNodes().get(dnIndex);
+    String volumeNameJson = dataNode.getDiskBalancerSetting(
+        DiskBalancerConstants.DISKBALANCER_VOLUME_NAME);
+    Assert.assertNotNull(volumeNameJson);
+    ObjectMapper mapper = new ObjectMapper();
+
+    @SuppressWarnings("unchecked")
+    Map<String, String> volumemap =
+        mapper.readValue(volumeNameJson, HashMap.class);
+
+    Assert.assertEquals(2, volumemap.size());
+  }
+
+  @Test
+  public void testGetDiskBalancerInvalidSetting() throws Exception {
+    final int dnIndex = 0;
+    final String invalidSetting = "invalidSetting";
+    DataNode dataNode = cluster.getDataNodes().get(dnIndex);
+    thrown.expect(DiskBalancerException.class);
+    thrown.expect(new ResultVerifier(Result.UNKNOWN_KEY));
+    dataNode.getDiskBalancerSetting(invalidSetting);
+  }
 
   @Test
   public void testQueryPlan() throws Exception {
@@ -173,6 +216,8 @@ public class TestDiskBalancerRPC {
     final int dnIndex = 0;
     DataNode dataNode = cluster.getDataNodes().get(dnIndex);
     thrown.expect(DiskBalancerException.class);
+    thrown.expect(new ResultVerifier(Result.UNKNOWN_KEY));
     dataNode.getDiskBalancerSetting(
         DiskBalancerConstants.DISKBALANCER_BANDWIDTH);
   }
@@ -223,4 +268,25 @@ public class TestDiskBalancerRPC {
       return this;
     }
   }
+
+  private static class ResultVerifier
+      extends TypeSafeMatcher<DiskBalancerException> {
+    private final DiskBalancerException.Result expectedResult;
+
+    ResultVerifier(DiskBalancerException.Result expectedResult) {
+      this.expectedResult = expectedResult;
+    }
+
+    @Override
+    protected boolean matchesSafely(DiskBalancerException exception) {
+      return (this.expectedResult == exception.getResult());
+    }
+
+    @Override
+    public void describeTo(Description description) {
+      description.appendText("expects Result: ")
+          .appendValue(this.expectedResult);
+    }
+  }
 }
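
One note on the test pattern above: JUnit's ExpectedException rule ANDs all registered expectations together, so each test verifies both the exception type and its Result code. A self-contained sketch of the same pattern outside HDFS; the exception class and message here are invented for illustration:

import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class MatcherChainingSketch {
  @Rule
  public ExpectedException thrown = ExpectedException.none();

  // Mirrors ResultVerifier: a TypeSafeMatcher that inspects one field of
  // the thrown exception.
  private static class MessagePrefixMatcher
      extends TypeSafeMatcher<IllegalStateException> {
    private final String prefix;

    MessagePrefixMatcher(String prefix) {
      this.prefix = prefix;
    }

    @Override
    protected boolean matchesSafely(IllegalStateException e) {
      return e.getMessage() != null && e.getMessage().startsWith(prefix);
    }

    @Override
    public void describeTo(Description description) {
      description.appendText("message starting with ").appendValue(prefix);
    }
  }

  @Test
  public void bothExpectationsMustHold() {
    // Both expect() calls must match, exactly as in the tests above.
    thrown.expect(IllegalStateException.class);
    thrown.expect(new MessagePrefixMatcher("unknown key"));
    throw new IllegalStateException("unknown key: demo");
  }
}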