You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2014/08/28 00:23:47 UTC
[20/22] git commit: HDFS-6924. Add new RAM_DISK storage type. (Arpit
Agarwal)
HDFS-6924. Add new RAM_DISK storage type. (Arpit Agarwal)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5e81c4fa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5e81c4fa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5e81c4fa
Branch: refs/heads/HDFS-6581
Commit: 5e81c4fa57bf785a0d1b1eb75b50d4b2dfa40f50
Parents: 6d12536
Author: arp <ar...@apache.org>
Authored: Wed Aug 27 09:03:45 2014 -0700
Committer: arp <ar...@apache.org>
Committed: Wed Aug 27 15:23:02 2014 -0700
----------------------------------------------------------------------
hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt | 4 ++++
.../src/main/java/org/apache/hadoop/hdfs/StorageType.java | 3 ++-
.../java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java | 4 ++++
hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto | 2 +-
.../org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java | 9 ++++++---
.../apache/hadoop/hdfs/server/datanode/TestDataDirs.java | 5 ++++-
6 files changed, 21 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5e81c4fa/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt
index 706c03a..fc6e0e0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt
@@ -2,3 +2,7 @@
HDFS-6921. Add LazyPersist flag to FileStatus. (Arpit Agarwal)
+ HDFS-6924. Add new RAM_DISK storage type. (Arpit Agarwal)
+
+
+
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5e81c4fa/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java
index 3d8133c..51724f7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java
@@ -32,7 +32,8 @@ import org.apache.hadoop.classification.InterfaceStability;
@InterfaceStability.Unstable
public enum StorageType {
DISK,
- SSD;
+ SSD,
+ RAM_DISK;
public static final StorageType DEFAULT = DISK;
public static final StorageType[] EMPTY_ARRAY = {};
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5e81c4fa/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index 5efede7..5167597 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
@@ -1703,6 +1703,8 @@ public class PBHelper {
return StorageTypeProto.DISK;
case SSD:
return StorageTypeProto.SSD;
+ case RAM_DISK:
+ return StorageTypeProto.RAM_DISK;
default:
throw new IllegalStateException(
"BUG: StorageType not found, type=" + type);
@@ -1731,6 +1733,8 @@ public class PBHelper {
return StorageType.DISK;
case SSD:
return StorageType.SSD;
+ case RAM_DISK:
+ return StorageType.RAM_DISK;
default:
throw new IllegalStateException(
"BUG: StorageTypeProto not found, type=" + type);
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5e81c4fa/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
index cbb51f9..b54638e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
@@ -158,6 +158,7 @@ message FsPermissionProto {
enum StorageTypeProto {
DISK = 1;
SSD = 2;
+ RAM_DISK = 3;
}
/**
@@ -260,7 +261,6 @@ message HdfsFileStatusProto {
// Optional field for fileId
optional uint64 fileId = 13 [default = 0]; // default as an invalid id
optional int32 childrenNum = 14 [default = -1];
-
// Optional field for file encryption
optional FileEncryptionInfoProto fileEncryptionInfo = 15;
optional bool isLazyPersist = 16 [default = false];
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5e81c4fa/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
index cb85c7d..98fd59a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
@@ -448,13 +448,16 @@ public class TestPBHelper {
DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h2",
AdminStates.DECOMMISSIONED),
DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h3",
- AdminStates.NORMAL)
+ AdminStates.NORMAL),
+ DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h4",
+ AdminStates.NORMAL),
};
- String[] storageIDs = {"s1", "s2", "s3"};
+ String[] storageIDs = {"s1", "s2", "s3", "s4"};
StorageType[] media = {
StorageType.DISK,
StorageType.SSD,
- StorageType.DISK
+ StorageType.DISK,
+ StorageType.RAM_DISK
};
LocatedBlock lb = new LocatedBlock(
new ExtendedBlock("bp12", 12345, 10, 53),
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5e81c4fa/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
index 53babb4..c0b4f9a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
@@ -44,10 +44,11 @@ public class TestDataDirs {
File dir1 = new File("/dir1");
File dir2 = new File("/dir2");
File dir3 = new File("/dir3");
+ File dir4 = new File("/dir4");
// Verify that a valid string is correctly parsed, and that storage
// type is not case-sensitive
- String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3";
+ String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3,[ram_disk]/dir4";
conf.set(DFS_DATANODE_DATA_DIR_KEY, locations1);
locations = DataNode.getStorageLocations(conf);
assertThat(locations.size(), is(5));
@@ -59,6 +60,8 @@ public class TestDataDirs {
assertThat(locations.get(2).getUri(), is(dir2.toURI()));
assertThat(locations.get(3).getStorageType(), is(StorageType.DISK));
assertThat(locations.get(3).getUri(), is(dir3.toURI()));
+ assertThat(locations.get(4).getStorageType(), is(StorageType.RAM_DISK));
+ assertThat(locations.get(4).getUri(), is(dir4.toURI()));
// Verify that an unrecognized storage type result in an exception.
String locations2 = "[BadMediaType]/dir0,[ssd]/dir1,[disk]/dir2";