You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by cn...@apache.org on 2013/12/23 22:45:04 UTC
svn commit: r1553221 - in
/hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs: ./
src/main/java/org/apache/hadoop/hdfs/protocol/
src/main/java/org/apache/hadoop/hdfs/server/namenode/
src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ src/main/proto/
Author: cnauroth
Date: Mon Dec 23 21:45:03 2013
New Revision: 1553221
URL: http://svn.apache.org/r1553221
Log:
HDFS-5618. NameNode: persist ACLs in fsimage. Contributed by Haohui Mai.
Modified:
hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt
hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/LayoutVersion.java
hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java
hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java
hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/acl.proto
Modified: hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt?rev=1553221&r1=1553220&r2=1553221&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4685.txt Mon Dec 23 21:45:03 2013
@@ -16,6 +16,8 @@ HDFS-4685 (Unreleased)
HDFS-5685. Implement ACL as a INode feature. (Haohui Mai via cnauroth)
+ HDFS-5618. NameNode: persist ACLs in fsimage. (Haohui Mai via cnauroth)
+
OPTIMIZATIONS
BUG FIXES
Modified: hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/LayoutVersion.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/LayoutVersion.java?rev=1553221&r1=1553220&r2=1553221&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/LayoutVersion.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/LayoutVersion.java Mon Dec 23 21:45:03 2013
@@ -109,7 +109,8 @@ public class LayoutVersion {
+ "enable rebuilding retry cache in case of HA failover"),
CACHING(-48, "Support for cache pools and path-based caching"),
ADD_DATANODE_AND_STORAGE_UUIDS(-49, "Replace StorageID with DatanodeUuid."
- + " Use distinct StorageUuid per storage directory.");
+ + " Use distinct StorageUuid per storage directory."),
+ EXTENDED_ACL(-50, "Extended ACL");
final int lv;
Modified: hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java?rev=1553221&r1=1553220&r2=1553221&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java Mon Dec 23 21:45:03 2013
@@ -48,6 +48,8 @@ import org.apache.hadoop.fs.permission.P
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.LayoutVersion;
import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclFsImageProto;
+import org.apache.hadoop.hdfs.protocolPB.PBHelper;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@@ -102,6 +104,7 @@ import org.apache.hadoop.io.Text;
* } when {@link Feature#FSIMAGE_NAME_OPTIMIZATION} is not supported
* replicationFactor: short, modificationTime: long,
* accessTime: long, preferredBlockSize: long,
+ *
* numberOfBlocks: int (-1 for INodeDirectory, -2 for INodeSymLink),
* {
* nsQuota: long, dsQuota: long,
@@ -109,7 +112,11 @@ import org.apache.hadoop.io.Text;
* isINodeSnapshottable: byte,
* isINodeWithSnapshot: byte (if isINodeSnapshottable is false)
* } (when {@link Feature#SNAPSHOT} is supported),
- * fsPermission: short, PermissionStatus
+ * fsPermission: short, PermissionStatus,
+ * AclEntries {
+ * size: int,
+ * protobuf encoding of {@link AclFsImageProto}
+ * }(when {@link Feature#EXTENDED_ACL} is supported),
* } for INodeDirectory
* or
* {
@@ -124,9 +131,12 @@ import org.apache.hadoop.io.Text;
* {clientName: short + byte[], clientMachine: short + byte[]} (when
* isINodeFileUnderConstructionSnapshot is true),
* } (when {@link Feature#SNAPSHOT} is supported and writing snapshotINode),
- * fsPermission: short, PermissionStatus
- * } for INodeFile
- * }
+ * fsPermission: short, PermissionStatus,
+ * AclEntries {
+ * size: int,
+ * protobuf encoding of {@link AclFsImageProto}
+ * }(when {@link Feature#EXTENDED_ACL} is supported),
+ * } for INodeFile,
*
* INodeDirectoryInfo {
* fullPath of the directory: short + byte[],
@@ -696,10 +706,15 @@ public class FSImageFormat {
modificationTime, atime, blocks, replication, blockSize);
if (underConstruction) {
file.toUnderConstruction(clientName, clientMachine, null);
- return fileDiffs == null ? file : new INodeFile(file, fileDiffs);
- } else {
- return fileDiffs == null ? file : new INodeFile(file, fileDiffs);
}
+
+ AclFeature aclFeature = loadAclFeature(in, imgVersion);
+ if (aclFeature != null) {
+ file.addAclFeature(aclFeature);
+ }
+
+ return fileDiffs == null ? file : new INodeFile(file, fileDiffs);
+
} else if (numBlocks == -1) {
//directory
@@ -731,6 +746,12 @@ public class FSImageFormat {
if (nsQuota >= 0 || dsQuota >= 0) {
dir.addDirectoryWithQuotaFeature(nsQuota, dsQuota);
}
+
+ AclFeature aclFeature = loadAclFeature(in, imgVersion);
+ if (aclFeature != null) {
+ dir.addAclFeature(aclFeature);
+ }
+
if (withSnapshot) {
dir.addSnapshotFeature(null);
}
@@ -771,6 +792,18 @@ public class FSImageFormat {
throw new IOException("Unknown inode type: numBlocks=" + numBlocks);
}
+ private AclFeature loadAclFeature(DataInput in, final int imgVersion)
+ throws IOException {
+ AclFeature aclFeature = null;
+ if (LayoutVersion.supports(Feature.EXTENDED_ACL, imgVersion)) {
+ AclFsImageProto p = AclFsImageProto
+ .parseDelimitedFrom((DataInputStream) in);
+ aclFeature = new AclFeature();
+ aclFeature.setEntries(PBHelper.convertAclEntry(p.getEntriesList()));
+ }
+ return aclFeature;
+ }
+
/** Load {@link INodeFileAttributes}. */
public INodeFileAttributes loadINodeFileAttributes(DataInput in)
throws IOException {
Modified: hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java?rev=1553221&r1=1553220&r2=1553221&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java Mon Dec 23 21:45:03 2013
@@ -21,6 +21,7 @@ import java.io.DataInput;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
+import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -34,6 +35,8 @@ import org.apache.hadoop.hdfs.protocol.C
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.LayoutVersion;
import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;
+import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclFsImageProto;
+import org.apache.hadoop.hdfs.protocolPB.PBHelper;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
@@ -204,6 +207,7 @@ public class FSImageSerialization {
}
writePermissionStatus(file, out);
+ writeAclFeature(file, out);
}
/** Serialize an {@link INodeFileAttributes}. */
@@ -249,8 +253,9 @@ public class FSImageSerialization {
}
writePermissionStatus(node, out);
+ writeAclFeature(node, out);
}
-
+
/**
* Serialize a {@link INodeDirectory}
* @param a The node to write
@@ -282,7 +287,19 @@ public class FSImageSerialization {
Text.writeString(out, node.getSymlinkString());
writePermissionStatus(node, out);
}
-
+
+ private static void writeAclFeature(INodeWithAdditionalFields node,
+ DataOutput out) throws IOException {
+ AclFsImageProto.Builder b = AclFsImageProto.newBuilder();
+ OutputStream os = (OutputStream) out;
+
+ AclFeature feature = node.getAclFeature();
+ if (feature != null)
+ b.addAllEntries(PBHelper.convertAclEntryProto(feature.getEntries()));
+
+ b.build().writeDelimitedTo(os);
+ }
+
/** Serialize a {@link INodeReference} node */
private static void writeINodeReference(INodeReference ref, DataOutput out,
boolean writeUnderConstruction, ReferenceMap referenceMap
Modified: hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java?rev=1553221&r1=1553220&r2=1553221&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java Mon Dec 23 21:45:03 2013
@@ -126,7 +126,7 @@ class ImageLoaderCurrent implements Imag
new SimpleDateFormat("yyyy-MM-dd HH:mm");
private static int[] versions = { -16, -17, -18, -19, -20, -21, -22, -23,
-24, -25, -26, -27, -28, -30, -31, -32, -33, -34, -35, -36, -37, -38, -39,
- -40, -41, -42, -43, -44, -45, -46, -47, -48, -49 };
+ -40, -41, -42, -43, -44, -45, -46, -47, -48, -49, -50 };
private int imageVersion = 0;
private final Map<Long, Boolean> subtreeMap = new HashMap<Long, Boolean>();
Modified: hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/acl.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/acl.proto?rev=1553221&r1=1553220&r2=1553221&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/acl.proto (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/acl.proto Mon Dec 23 21:45:03 2013
@@ -60,6 +60,10 @@ message AclStatusProto {
repeated AclEntryProto entries = 4;
}
+message AclFsImageProto {
+ repeated AclEntryProto entries = 1;
+}
+
message ModifyAclEntriesRequestProto {
required string src = 1;
repeated AclEntryProto aclSpec = 2;